From 08828ef60973fbfe5c8160d6c2d5ce2648ff941d Mon Sep 17 00:00:00 2001 From: Tim Sheerman-Chase Date: Mon, 2 Sep 2013 00:09:59 +0100 Subject: [PATCH 001/256] Return time stamp and sequence number with video frame --- v4l2capture.c | 51 +++++++++++++++++++++++++++++++++++++++------------ 1 file changed, 39 insertions(+), 12 deletions(-) diff --git a/v4l2capture.c b/v4l2capture.c index dabd38c..e632b39 100644 --- a/v4l2capture.c +++ b/v4l2capture.c @@ -377,7 +377,7 @@ static PyObject *Video_device_queue_all_buffers(Video_device *self) Py_RETURN_NONE; } -static PyObject *Video_device_read_internal(Video_device *self, int queue) +static PyObject *Video_device_read_internal(Video_device *self, int queue, int return_timestamp) { if(!self->buffers) { @@ -444,22 +444,47 @@ static PyObject *Video_device_read_internal(Video_device *self, int queue) #undef CLAMP #endif + PyObject *out = result; + + if(return_timestamp) + { + out = PyTuple_New(4); + PyTuple_SetItem(out, 0, result); + PyTuple_SetItem(out, 1, PyInt_FromLong(buffer.timestamp.tv_sec)); + PyTuple_SetItem(out, 2, PyInt_FromLong(buffer.timestamp.tv_usec)); + PyTuple_SetItem(out, 3, PyInt_FromLong(buffer.sequence)); + } + if(queue && my_ioctl(self->fd, VIDIOC_QBUF, &buffer)) { return NULL; } - return result; + return out; } -static PyObject *Video_device_read(Video_device *self) +static PyObject *Video_device_read(Video_device *self, PyObject *args) { - return Video_device_read_internal(self, 0); + int return_timestamp=0; + + if(!PyArg_ParseTuple(args, "|i", &return_timestamp)) + { + return NULL; + } + + return Video_device_read_internal(self, 0, return_timestamp); } -static PyObject *Video_device_read_and_queue(Video_device *self) +static PyObject *Video_device_read_and_queue(Video_device *self, PyObject *args) { - return Video_device_read_internal(self, 1); + int return_timestamp=0; + + if(!PyArg_ParseTuple(args, "|i", &return_timestamp)) + { + return NULL; + } + + return Video_device_read_internal(self, 1, return_timestamp); } static PyMethodDef Video_device_methods[] = { @@ -499,14 +524,16 @@ static PyMethodDef Video_device_methods[] = { METH_NOARGS, "queue_all_buffers()\n\n" "Let the video device fill all buffers created."}, - {"read", (PyCFunction)Video_device_read, METH_NOARGS, - "read() -> string\n\n" + {"read", (PyCFunction)Video_device_read, METH_VARARGS, + "read(get_timestamp) -> string or tuple\n\n" "Reads image data from a buffer that has been filled by the video " - "device. The image data is in RGB och YUV420 format as decided by " + "device. The image data is in RGB or YUV420 format as decided by " "'set_format'. The buffer is removed from the queue. Fails if no buffer " - "is filled. Use select.select to check for filled buffers."}, - {"read_and_queue", (PyCFunction)Video_device_read_and_queue, METH_NOARGS, - "read_and_queue()\n\n" + "is filled. Use select.select to check for filled buffers. 
If " + "get_timestamp is true, a tuple is turned containing (sec, microsec, " + "sequence number)"}, + {"read_and_queue", (PyCFunction)Video_device_read_and_queue, METH_VARARGS, + "read_and_queue(get_timestamp)\n\n" "Same as 'read', but adds the buffer back to the queue so the video " "device can fill it again."}, {NULL} From b6ba872a7907783c0431c71e224382a8ba1c52a1 Mon Sep 17 00:00:00 2001 From: Tim Sheerman-Chase Date: Thu, 12 Sep 2013 20:01:18 +0100 Subject: [PATCH 002/256] Support more pixel formats --- v4l2capture.c | 16 +++++++++++----- 1 file changed, 11 insertions(+), 5 deletions(-) diff --git a/v4l2capture.c b/v4l2capture.c index e632b39..4f66cd7 100644 --- a/v4l2capture.c +++ b/v4l2capture.c @@ -15,6 +15,7 @@ #include #include #include +#include #ifdef USE_LIBV4L #include @@ -205,9 +206,9 @@ static PyObject *Video_device_set_format(Video_device *self, PyObject *args) { int size_x; int size_y; - int yuv420 = 0; + const char *fmt = NULL; - if(!PyArg_ParseTuple(args, "ii|i", &size_x, &size_y, &yuv420)) + if(!PyArg_ParseTuple(args, "ii|s", &size_x, &size_y, &fmt)) { return NULL; } @@ -217,12 +218,17 @@ static PyObject *Video_device_set_format(Video_device *self, PyObject *args) format.fmt.pix.width = size_x; format.fmt.pix.height = size_y; #ifdef USE_LIBV4L - format.fmt.pix.pixelformat = - yuv420 ? V4L2_PIX_FMT_YUV420 : V4L2_PIX_FMT_RGB24; + format.fmt.pix.pixelformat = V4L2_PIX_FMT_RGB24; + if(fmt != NULL && strcmp(fmt, "MJPEG")==0) + format.fmt.pix.pixelformat = V4L2_PIX_FMT_MJPEG; + if(fmt != NULL && strcmp(fmt, "RGB24")==0) + format.fmt.pix.pixelformat = V4L2_PIX_FMT_RGB24; + if(fmt != NULL && strcmp(fmt, "YUV420")==0) + format.fmt.pix.pixelformat = V4L2_PIX_FMT_YUV420; #else format.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV; #endif - format.fmt.pix.field = V4L2_FIELD_INTERLACED; + format.fmt.pix.field = V4L2_FIELD_NONE; format.fmt.pix.bytesperline = 0; if(my_ioctl(self->fd, VIDIOC_S_FMT, &format)) From 3203f703d7b4b659140116ffc9f846c2d03311ce Mon Sep 17 00:00:00 2001 From: Tim Sheerman-Chase Date: Fri, 13 Sep 2013 14:43:28 +0100 Subject: [PATCH 003/256] Update docstrings --- v4l2capture.c | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/v4l2capture.c b/v4l2capture.c index 4f66cd7..86838d5 100644 --- a/v4l2capture.c +++ b/v4l2capture.c @@ -507,11 +507,10 @@ static PyMethodDef Video_device_methods[] = { "set containing strings identifying the capabilities of the video " "device."}, {"set_format", (PyCFunction)Video_device_set_format, METH_VARARGS, - "set_format(size_x, size_y, yuv420 = 0) -> size_x, size_y\n\n" + "set_format(size_x, size_y, pixel_format='RGB24') -> size_x, size_y\n\n" "Request the video device to set image size and format. The device may " "choose another size than requested and will return its choice. The " - "image format will be RGB24 if yuv420 is false (default) or YUV420 if " - "yuv420 is true."}, + "pixel format may be either RGB24, YUV420 or MJPEG."}, {"set_fps", (PyCFunction)Video_device_set_fps, METH_VARARGS, "set_fps(fps) -> fps \n\n" "Request the video device to set frame per seconds.The device may " @@ -533,7 +532,7 @@ static PyMethodDef Video_device_methods[] = { {"read", (PyCFunction)Video_device_read, METH_VARARGS, "read(get_timestamp) -> string or tuple\n\n" "Reads image data from a buffer that has been filled by the video " - "device. The image data is in RGB or YUV420 format as decided by " + "device. The image data is in RGB24, YUV420 or MJPEG format as decided by " "'set_format'. The buffer is removed from the queue. 
Fails if no buffer " "is filled. Use select.select to check for filled buffers. If " "get_timestamp is true, a tuple is turned containing (sec, microsec, " From 057ca07598a4e314f180232141964bbe9b3eb3c0 Mon Sep 17 00:00:00 2001 From: Tim Sheerman-Chase Date: Thu, 17 Oct 2013 21:44:29 +0100 Subject: [PATCH 004/256] Create function to get current format --- v4l2capture.c | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/v4l2capture.c b/v4l2capture.c index 86838d5..7f376e4 100644 --- a/v4l2capture.c +++ b/v4l2capture.c @@ -16,6 +16,7 @@ #include #include #include +#include //Only used for debugging #ifdef USE_LIBV4L #include @@ -257,6 +258,19 @@ static PyObject *Video_device_set_fps(Video_device *self, PyObject *args) return Py_BuildValue("i",setfps.parm.capture.timeperframe.denominator); } +static PyObject *Video_device_get_format(Video_device *self) +{ + + struct v4l2_format format; + format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + if(my_ioctl(self->fd, VIDIOC_G_FMT, &format)) + { + return NULL; + } + return Py_BuildValue("ii", format.fmt.pix.width, format.fmt.pix.height); + +} + static PyObject *Video_device_start(Video_device *self) { ASSERT_OPEN; @@ -511,6 +525,8 @@ static PyMethodDef Video_device_methods[] = { "Request the video device to set image size and format. The device may " "choose another size than requested and will return its choice. The " "pixel format may be either RGB24, YUV420 or MJPEG."}, + {"get_format", (PyCFunction)Video_device_get_format, METH_NOARGS, + "get_format() -> size_x, size_y\n\n"}, {"set_fps", (PyCFunction)Video_device_set_fps, METH_VARARGS, "set_fps(fps) -> fps \n\n" "Request the video device to set frame per seconds.The device may " From c1b5bd26a7ea39ffa37fce1747ea1017c120bfdf Mon Sep 17 00:00:00 2001 From: Tim Sheerman-Chase Date: Thu, 17 Oct 2013 21:54:33 +0100 Subject: [PATCH 005/256] Return pixel format information --- v4l2capture.c | 27 ++++++++++++++++++++++++++- 1 file changed, 26 insertions(+), 1 deletion(-) diff --git a/v4l2capture.c b/v4l2capture.c index 7f376e4..6efb090 100644 --- a/v4l2capture.c +++ b/v4l2capture.c @@ -267,7 +267,32 @@ static PyObject *Video_device_get_format(Video_device *self) { return NULL; } - return Py_BuildValue("ii", format.fmt.pix.width, format.fmt.pix.height); + + PyObject *out = PyTuple_New(3); + PyTuple_SetItem(out, 0, PyInt_FromLong(format.fmt.pix.width)); + PyTuple_SetItem(out, 1, PyInt_FromLong(format.fmt.pix.height)); + + PyObject *pixFormatStr = NULL; + switch(format.fmt.pix.pixelformat) + { + case V4L2_PIX_FMT_MJPEG: + pixFormatStr = PyString_FromString("MJPEG"); + break; + case V4L2_PIX_FMT_RGB24: + pixFormatStr = PyString_FromString("RGB24"); + break; + case V4L2_PIX_FMT_YUV420: + pixFormatStr = PyString_FromString("YUV420"); + break; + case V4L2_PIX_FMT_YUYV: + pixFormatStr = PyString_FromString("YUYV"); + break; + default: + pixFormatStr = PyString_FromString("Unknown"); + break; + } + PyTuple_SetItem(out, 2, pixFormatStr); + return out; } From c5253ad36e22214241f2724831a776b627eefd5d Mon Sep 17 00:00:00 2001 From: Tim Sheerman-Chase Date: Fri, 18 Oct 2013 02:58:17 +0100 Subject: [PATCH 006/256] Working toward huffman table insert code in c --- v4l2capture.c | 228 +++++++++++++++++++++++++++++++++++++++++++++++++- 1 file changed, 227 insertions(+), 1 deletion(-) diff --git a/v4l2capture.c b/v4l2capture.c index 6efb090..690d740 100644 --- a/v4l2capture.c +++ b/v4l2capture.c @@ -532,6 +532,231 @@ static PyObject *Video_device_read_and_queue(Video_device *self, PyObject *args) return 
Video_device_read_internal(self, 1, return_timestamp); } +// ********************************************************************* + +#define HUFFMAN_SEGMENT_LEN 420 + +const char huffmanSegment[HUFFMAN_SEGMENT_LEN+1] = + "\xFF\xC4\x01\xA2\x00\x00\x01\x05\x01\x01\x01\x01" + "\x01\x01\x00\x00\x00\x00\x00\x00\x00\x00\x01\x02" + "\x03\x04\x05\x06\x07\x08\x09\x0A\x0B\x01\x00\x03" + "\x01\x01\x01\x01\x01\x01\x01\x01\x01\x00\x00\x00" + "\x00\x00\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09" + "\x0A\x0B\x10\x00\x02\x01\x03\x03\x02\x04\x03\x05" + "\x05\x04\x04\x00\x00\x01\x7D\x01\x02\x03\x00\x04" + "\x11\x05\x12\x21\x31\x41\x06\x13\x51\x61\x07\x22" + "\x71\x14\x32\x81\x91\xA1\x08\x23\x42\xB1\xC1\x15" + "\x52\xD1\xF0\x24\x33\x62\x72\x82\x09\x0A\x16\x17" + "\x18\x19\x1A\x25\x26\x27\x28\x29\x2A\x34\x35\x36" + "\x37\x38\x39\x3A\x43\x44\x45\x46\x47\x48\x49\x4A" + "\x53\x54\x55\x56\x57\x58\x59\x5A\x63\x64\x65\x66" + "\x67\x68\x69\x6A\x73\x74\x75\x76\x77\x78\x79\x7A" + "\x83\x84\x85\x86\x87\x88\x89\x8A\x92\x93\x94\x95" + "\x96\x97\x98\x99\x9A\xA2\xA3\xA4\xA5\xA6\xA7\xA8" + "\xA9\xAA\xB2\xB3\xB4\xB5\xB6\xB7\xB8\xB9\xBA\xC2" + "\xC3\xC4\xC5\xC6\xC7\xC8\xC9\xCA\xD2\xD3\xD4\xD5" + "\xD6\xD7\xD8\xD9\xDA\xE1\xE2\xE3\xE4\xE5\xE6\xE7" + "\xE8\xE9\xEA\xF1\xF2\xF3\xF4\xF5\xF6\xF7\xF8\xF9" + "\xFA\x11\x00\x02\x01\x02\x04\x04\x03\x04\x07\x05" + "\x04\x04\x00\x01\x02\x77\x00\x01\x02\x03\x11\x04" + "\x05\x21\x31\x06\x12\x41\x51\x07\x61\x71\x13\x22" + "\x32\x81\x08\x14\x42\x91\xA1\xB1\xC1\x09\x23\x33" + "\x52\xF0\x15\x62\x72\xD1\x0A\x16\x24\x34\xE1\x25" + "\xF1\x17\x18\x19\x1A\x26\x27\x28\x29\x2A\x35\x36" + "\x37\x38\x39\x3A\x43\x44\x45\x46\x47\x48\x49\x4A" + "\x53\x54\x55\x56\x57\x58\x59\x5A\x63\x64\x65\x66" + "\x67\x68\x69\x6A\x73\x74\x75\x76\x77\x78\x79\x7A" + "\x82\x83\x84\x85\x86\x87\x88\x89\x8A\x92\x93\x94" + "\x95\x96\x97\x98\x99\x9A\xA2\xA3\xA4\xA5\xA6\xA7" + "\xA8\xA9\xAA\xB2\xB3\xB4\xB5\xB6\xB7\xB8\xB9\xBA" + "\xC2\xC3\xC4\xC5\xC6\xC7\xC8\xC9\xCA\xD2\xD3\xD4" + "\xD5\xD6\xD7\xD8\xD9\xDA\xE2\xE3\xE4\xE5\xE6\xE7" + "\xE8\xE9\xEA\xF2\xF3\xF4\xF5\xF6\xF7\xF8\xF9\xFA"; + +int ReadJpegFrame(const unsigned char *data, unsigned offset, const unsigned char **twoBytesOut, unsigned *frameStartPosOut, unsigned *cursorOut) +{ + //Based on http://www.gdcl.co.uk/2013/05/02/Motion-JPEG.html + //and https://en.wikipedia.org/wiki/JPEG + + *twoBytesOut = NULL; + *frameStartPosOut = 0; + *cursorOut = 0; + unsigned cursor = offset; + //Check frame start + unsigned frameStartPos = offset; + const unsigned char *twoBytes = &data[cursor]; + + if (twoBytes[0] != 0xff) + { + //print "Error: found header", map(hex,twoBytes),"at position",cursor + return 0; + } + + cursor = 2 + cursor; + + //Handle padding + int paddingByte = (twoBytes[0] == 0xff && twoBytes[1] == 0xff); + if(paddingByte) + { + *twoBytesOut = twoBytes; + *frameStartPosOut = frameStartPos; + *cursorOut = cursor; + return 1; + } + + //Structure markers with 2 byte length + int markHeader = (twoBytes[0] == 0xff && twoBytes[1] >= 0xd0 && twoBytes[1] <= 0xd9); + if (markHeader) + { + *twoBytesOut = twoBytes; + *frameStartPosOut = frameStartPos; + *cursorOut = cursor; + return 1; + } + + //Determine length of compressed (entropy) data + int compressedDataStart = (twoBytes[0] == 0xff && twoBytes[1] == 0xda); + if (compressedDataStart) + { + unsigned sosLength = ((data[cursor] << 8) + data[cursor+1]); + cursor += sosLength; + + //Seek through frame + int run = 1; + while(run) + { + unsigned char byte = data[cursor]; + cursor += 1; + + if(byte == 0xff) + { + unsigned char byte2 = 
data[cursor]; + cursor += 1; + if(byte2 != 0x00) + { + if(byte2 >= 0xd0 && byte2 <= 0xd8) + { + //Found restart structure + //print hex(byte), hex(byte2) + } + else + { + //End of frame + run = 0; + cursor -= 2; + } + } + else + { + //Add escaped 0xff value in entropy data + } + } + else + { + + } + } + + *twoBytesOut = twoBytes; + *frameStartPosOut = frameStartPos; + *cursorOut = cursor; + return 1; + } + + //More cursor for all other segment types + unsigned segLength = (data[cursor] << 8) + data[cursor+1]; + cursor += segLength; + *twoBytesOut = twoBytes; + *frameStartPosOut = frameStartPos; + *cursorOut = cursor; + return 1; +} + +static PyObject *InsertHuffmanTable(PyObject *self, PyObject *args) +{ + /* This converts an MJPEG frame into a standard JPEG binary + MJPEG images omit the huffman table if the standard table + is used. If it is missing, this function adds the table + into the file structure. */ + + if(PyTuple_Size(args) < 1) + { + PyErr_BadArgument(); + PyErr_Format(PyExc_TypeError, "Function requires 1 argument"); + return NULL; + } + + PyObject *inBuffer = PyTuple_GetItem(args, 0); + + if(!PyString_Check(inBuffer)) + { + PyErr_BadArgument(); + PyErr_Format(PyExc_TypeError, "Argument 1 must be a string."); + //PyObject* type = PyObject_Type(inBuffer); + //PyObject_Print(type, stdout, Py_PRINT_RAW); + //Py_CLEAR(type); + + return NULL; + } + + int parsing = 1; + int frameStartPos = 0; + int huffFound = 0; + unsigned char* inBufferPtr = PyString_AsString(inBuffer); + Py_ssize_t inBufferLen = PyString_Size(inBuffer); + + PyObject *outBuffer = PyString_FromString(""); + _PyString_Resize(&outBuffer, inBufferLen + HUFFMAN_SEGMENT_LEN); + + while(parsing) + { + //Check if we should stop + if (frameStartPos >= inBufferLen) + { + parsing = 0; + continue; + } + + //Read the next segment + const unsigned char *twoBytes = NULL; + unsigned frameStartPos=0, frameEndPos=0; + int ok = ReadJpegFrame(inBufferPtr, frameStartPos, &twoBytes, &frameStartPos, &frameEndPos); + //if(verbose) + // print map(hex, twoBytes), frameStartPos, frameEndPos; + + //Stop if there is a serious error + if(!ok) + { + parsing = 0; + continue; + } + + //Check if this segment is the compressed data + if(twoBytes[0] == 0xff && twoBytes[1] == 0xda && !huffFound) + { + PyObject *substr = PyString_FromStringAndSize(huffmanSegment, HUFFMAN_SEGMENT_LEN); + PyFile_WriteObject(substr, outBuffer, Py_PRINT_RAW); + Py_CLEAR(substr); + } + + //Check the type of frame + if(twoBytes[0] == 0xff && twoBytes[1] == 0xc4) + huffFound = 1; + + //Write current structure to output + PyObject *substr = PyString_FromStringAndSize(&inBufferPtr[frameStartPos], frameEndPos - frameStartPos); + PyFile_WriteObject(substr, outBuffer, Py_PRINT_RAW); + Py_CLEAR(substr); + + //Move cursor + frameStartPos = frameEndPos; + } + + return outBuffer; +} + +// ********************************************************************* + static PyMethodDef Video_device_methods[] = { {"close", (PyCFunction)Video_device_close, METH_NOARGS, "close()\n\n" @@ -597,7 +822,8 @@ static PyTypeObject Video_device_type = { }; static PyMethodDef module_methods[] = { - {NULL} + { "InsertHuffmanTable", (PyCFunction)InsertHuffmanTable, METH_VARARGS, NULL }, + { NULL, NULL, 0, NULL } }; PyMODINIT_FUNC initv4l2capture(void) From 9402b47f07b6a60f1c38d45340a8c0ff4baaf360 Mon Sep 17 00:00:00 2001 From: Tim Sheerman-Chase Date: Fri, 18 Oct 2013 03:28:49 +0100 Subject: [PATCH 007/256] Huffman table insert now works --- v4l2capture.c | 21 ++++++++++----------- 1 file changed, 10 
insertions(+), 11 deletions(-) diff --git a/v4l2capture.c b/v4l2capture.c index 690d740..791868f 100644 --- a/v4l2capture.c +++ b/v4l2capture.c @@ -2,6 +2,7 @@ // Python extension to capture video with video4linux2 // // 2009, 2010, 2011 Fredrik Portstrom +// 2013, Tim Sheerman-Chase // // I, the copyright holder of this file, hereby release it into the // public domain. This applies worldwide. In case this is not legally @@ -16,7 +17,6 @@ #include #include #include -#include //Only used for debugging #ifdef USE_LIBV4L #include @@ -700,13 +700,12 @@ static PyObject *InsertHuffmanTable(PyObject *self, PyObject *args) } int parsing = 1; - int frameStartPos = 0; + unsigned frameStartPos = 0; int huffFound = 0; - unsigned char* inBufferPtr = PyString_AsString(inBuffer); + unsigned char* inBufferPtr = (unsigned char*)PyString_AsString(inBuffer); Py_ssize_t inBufferLen = PyString_Size(inBuffer); PyObject *outBuffer = PyString_FromString(""); - _PyString_Resize(&outBuffer, inBufferLen + HUFFMAN_SEGMENT_LEN); while(parsing) { @@ -719,8 +718,10 @@ static PyObject *InsertHuffmanTable(PyObject *self, PyObject *args) //Read the next segment const unsigned char *twoBytes = NULL; - unsigned frameStartPos=0, frameEndPos=0; + unsigned frameEndPos=0; + int ok = ReadJpegFrame(inBufferPtr, frameStartPos, &twoBytes, &frameStartPos, &frameEndPos); + //if(verbose) // print map(hex, twoBytes), frameStartPos, frameEndPos; @@ -735,8 +736,7 @@ static PyObject *InsertHuffmanTable(PyObject *self, PyObject *args) if(twoBytes[0] == 0xff && twoBytes[1] == 0xda && !huffFound) { PyObject *substr = PyString_FromStringAndSize(huffmanSegment, HUFFMAN_SEGMENT_LEN); - PyFile_WriteObject(substr, outBuffer, Py_PRINT_RAW); - Py_CLEAR(substr); + PyString_ConcatAndDel(&outBuffer, substr); } //Check the type of frame @@ -744,14 +744,13 @@ static PyObject *InsertHuffmanTable(PyObject *self, PyObject *args) huffFound = 1; //Write current structure to output - PyObject *substr = PyString_FromStringAndSize(&inBufferPtr[frameStartPos], frameEndPos - frameStartPos); - PyFile_WriteObject(substr, outBuffer, Py_PRINT_RAW); - Py_CLEAR(substr); + PyObject *substr = PyString_FromStringAndSize((char *)&inBufferPtr[frameStartPos], frameEndPos - frameStartPos); + PyString_ConcatAndDel(&outBuffer, substr); //Move cursor frameStartPos = frameEndPos; } - + return outBuffer; } From 3d4653650b47e9f074ddb391a5e7d7223cff0182 Mon Sep 17 00:00:00 2001 From: Tim Sheerman-Chase Date: Sun, 20 Oct 2013 01:04:20 +0100 Subject: [PATCH 008/256] Add device manager object --- v4l2capture.c | 48 ++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 48 insertions(+) diff --git a/v4l2capture.c b/v4l2capture.c index 791868f..a67a2fe 100644 --- a/v4l2capture.c +++ b/v4l2capture.c @@ -48,6 +48,13 @@ typedef struct { int buffer_count; } Video_device; +typedef struct { + PyObject_HEAD + int fd; + struct buffer *buffers; + int buffer_count; +} Device_manager; + struct capability { int id; const char *name; @@ -756,6 +763,19 @@ static PyObject *InsertHuffmanTable(PyObject *self, PyObject *args) // ********************************************************************* +static void Device_manager_dealloc(Device_manager *self) +{ + self->ob_type->tp_free((PyObject *)self); +} + +static int Device_manager_init(Device_manager *self, PyObject *args, + PyObject *kwargs) +{ + return 0; +} + +// ********************************************************************* + static PyMethodDef Video_device_methods[] = { {"close", (PyCFunction)Video_device_close, METH_NOARGS, 
"close()\n\n" @@ -820,6 +840,25 @@ static PyTypeObject Video_device_type = { (initproc)Video_device_init }; +// ********************************************************************* + +static PyMethodDef Device_manager_methods[] = { + {NULL} +}; + +static PyTypeObject Device_manager_type = { + PyObject_HEAD_INIT(NULL) + 0, "v4l2capture.Device_manager", sizeof(Device_manager), 0, + (destructor)Device_manager_dealloc, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, Py_TPFLAGS_DEFAULT, "Video_device(path)\n\nOpens the video device at " + "the given path and returns an object that can capture images. The " + "constructor and all methods except close may raise IOError.", 0, 0, 0, + 0, 0, 0, Device_manager_methods, 0, 0, 0, 0, 0, 0, 0, + (initproc)Device_manager_init +}; + +// ********************************************************************* + static PyMethodDef module_methods[] = { { "InsertHuffmanTable", (PyCFunction)InsertHuffmanTable, METH_VARARGS, NULL }, { NULL, NULL, 0, NULL } @@ -828,12 +867,18 @@ static PyMethodDef module_methods[] = { PyMODINIT_FUNC initv4l2capture(void) { Video_device_type.tp_new = PyType_GenericNew; + Device_manager_type.tp_new = PyType_GenericNew; if(PyType_Ready(&Video_device_type) < 0) { return; } + if(PyType_Ready(&Device_manager_type) < 0) + { + return; + } + PyObject *module = Py_InitModule3("v4l2capture", module_methods, "Capture video with video4linux2."); @@ -844,4 +889,7 @@ PyMODINIT_FUNC initv4l2capture(void) Py_INCREF(&Video_device_type); PyModule_AddObject(module, "Video_device", (PyObject *)&Video_device_type); + Py_INCREF(&Device_manager_type); + PyModule_AddObject(module, "Device_manager", (PyObject *)&Device_manager_type); + } From 4760bf2013c980bcb82159defc371fefe364973b Mon Sep 17 00:00:00 2001 From: Tim Sheerman-Chase Date: Sun, 20 Oct 2013 02:54:43 +0100 Subject: [PATCH 009/256] Construct a video device --- v4l2capture.c | 51 +++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 51 insertions(+) diff --git a/v4l2capture.c b/v4l2capture.c index a67a2fe..f0bd6db 100644 --- a/v4l2capture.c +++ b/v4l2capture.c @@ -41,6 +41,8 @@ struct buffer { size_t length; }; +static PyTypeObject Device_manager_type; + typedef struct { PyObject_HEAD int fd; @@ -774,6 +776,52 @@ static int Device_manager_init(Device_manager *self, PyObject *args, return 0; } +static PyObject *Device_manager_Start(Device_manager *self, PyObject *args) +// self, dev = None, reqSize=(640, 480), reqFps = 30, fmt = "MJPEG"): +{ + //Process arguments + const char *devarg = NULL; + if(PyTuple_Size(args) < 1) + { + PyObject *pydevarg = PyTuple_GetItem(args, 0); + devarg = PyString_AsString(pydevarg); + } + else + { + devarg = "/dev/video0"; + } + + //Open the video device. + PyObject *arglist = Py_BuildValue("(s)", devarg); + PyObject *obj = PyObject_CallObject((PyObject *) &Device_manager_type, arglist); + Py_DECREF(arglist); + Py_DECREF(obj); + /* + //Suggest an image size to the device. The device may choose and + //return another size if it doesn't support the suggested one. + self.video.set_format(reqSize[0], reqSize[1], fmt) + + //Query current pixel format + self.size_x, self.size_y, self.pixelFmt = self.video.get_format() + + //Set target frames per second + self.fps = self.video.set_fps(reqFps) + + // Create a buffer to store image data in. This must be done before + // calling 'start' if v4l2capture is compiled with libv4l2. Otherwise + // raises IOError. + self.video.create_buffers(10) + + // Send the buffer to the device. 
Some devices require this to be done + // before calling 'start'. + self.video.queue_all_buffers() + + // Start the device. This lights the LED if it's a camera that has one. + self.video.start()*/ + + Py_RETURN_NONE; +} + // ********************************************************************* static PyMethodDef Video_device_methods[] = { @@ -843,6 +891,9 @@ static PyTypeObject Video_device_type = { // ********************************************************************* static PyMethodDef Device_manager_methods[] = { + {"start", (PyCFunction)Device_manager_Start, METH_VARARGS, + "start()\n\n" + "Start video capture."}, {NULL} }; From 5464d801e5043e13203ba9b0fe406eb802c499d7 Mon Sep 17 00:00:00 2001 From: Tim Sheerman-Chase Date: Sun, 20 Oct 2013 03:11:32 +0100 Subject: [PATCH 010/256] Move to use C++ --- MANIFEST | 2 +- setup.py | 6 +++--- v4l2capture.c => v4l2capture.cpp | 33 +++++++++++++++++++++----------- 3 files changed, 26 insertions(+), 15 deletions(-) rename v4l2capture.c => v4l2capture.cpp (97%) diff --git a/MANIFEST b/MANIFEST index 136c8d9..9bbff92 100644 --- a/MANIFEST +++ b/MANIFEST @@ -3,4 +3,4 @@ capture_picture.py capture_picture_delayed.py list_devices.py setup.py -v4l2capture.c +v4l2capture.cpp diff --git a/setup.py b/setup.py index 8f08f8d..66ff7b5 100755 --- a/setup.py +++ b/setup.py @@ -14,7 +14,7 @@ setup( name = "v4l2capture", version = "1.4", - author = "Fredrik Portstrom", + author = "Fredrik Portstrom, Tim Sheerman-Chase", author_email = "fredrik@jemla.se", url = "http://fredrik.jemla.eu/v4l2capture", description = "Capture video with video4linux2", @@ -23,6 +23,6 @@ license = "Public Domain", classifiers = [ "License :: Public Domain", - "Programming Language :: C"], + "Programming Language :: C++"], ext_modules = [ - Extension("v4l2capture", ["v4l2capture.c"], libraries = ["v4l2"])]) + Extension("v4l2capture", ["v4l2capture.cpp"], libraries = ["v4l2"])]) diff --git a/v4l2capture.c b/v4l2capture.cpp similarity index 97% rename from v4l2capture.c rename to v4l2capture.cpp index f0bd6db..c873cbd 100644 --- a/v4l2capture.c +++ b/v4l2capture.cpp @@ -17,6 +17,8 @@ #include #include #include +#include +#include #ifdef USE_LIBV4L #include @@ -41,8 +43,6 @@ struct buffer { size_t length; }; -static PyTypeObject Device_manager_type; - typedef struct { PyObject_HEAD int fd; @@ -52,9 +52,9 @@ typedef struct { typedef struct { PyObject_HEAD - int fd; - struct buffer *buffers; - int buffer_count; + std::map fd; + std::map buffers; + std::map buffer_count; } Device_manager; struct capability { @@ -191,7 +191,7 @@ static PyObject *Video_device_get_info(Video_device *self) struct capability *capability = capabilities; - while((void *)capability < (void *)capabilities + sizeof(capabilities)) + while(capability < (struct capability *)(capabilities + sizeof(capabilities))) { if(caps.capabilities & capability->id) { @@ -366,7 +366,7 @@ static PyObject *Video_device_create_buffers(Video_device *self, PyObject *args) return NULL; } - self->buffers = malloc(reqbuf.count * sizeof(struct buffer)); + self->buffers = (struct buffer *)malloc(reqbuf.count * sizeof(struct buffer)); if(!self->buffers) { @@ -374,7 +374,7 @@ static PyObject *Video_device_create_buffers(Video_device *self, PyObject *args) return NULL; } - int i; + unsigned int i; for(i = 0; i < reqbuf.count; i++) { @@ -451,7 +451,7 @@ static PyObject *Video_device_read_internal(Video_device *self, int queue, int r #ifdef USE_LIBV4L PyObject *result = PyString_FromStringAndSize( - self->buffers[buffer.index].start, 
buffer.bytesused); + (const char*)self->buffers[buffer.index].start, buffer.bytesused); if(!result) { @@ -792,10 +792,21 @@ static PyObject *Device_manager_Start(Device_manager *self, PyObject *args) } //Open the video device. - PyObject *arglist = Py_BuildValue("(s)", devarg); + /*PyObject *arglist = Py_BuildValue("(s)", devarg); PyObject *obj = PyObject_CallObject((PyObject *) &Device_manager_type, arglist); Py_DECREF(arglist); - Py_DECREF(obj); + Py_DECREF(obj);*/ + int fd = v4l2_open(devarg, O_RDWR | O_NONBLOCK); + + if(fd < 0) + { + PyErr_SetFromErrnoWithFilename(PyExc_IOError, devarg); + Py_RETURN_NONE; + } + + //self->fd = fd; + //self->buffers = NULL; + /* //Suggest an image size to the device. The device may choose and //return another size if it doesn't support the suggested one. From 4bd6a57621f5c4e41335a62ffefd9e0fa9e93db3 Mon Sep 17 00:00:00 2001 From: Tim Sheerman-Chase Date: Sun, 20 Oct 2013 03:27:04 +0100 Subject: [PATCH 011/256] Create buffers --- v4l2capture.cpp | 928 ++++++++++++++++++++++++++---------------------- 1 file changed, 497 insertions(+), 431 deletions(-) diff --git a/v4l2capture.cpp b/v4l2capture.cpp index c873cbd..823d757 100644 --- a/v4l2capture.cpp +++ b/v4l2capture.cpp @@ -32,239 +32,239 @@ #endif #define ASSERT_OPEN if(self->fd < 0) \ - { \ - PyErr_SetString(PyExc_ValueError, \ - "I/O operation on closed file"); \ - return NULL; \ - } + { \ + PyErr_SetString(PyExc_ValueError, \ + "I/O operation on closed file"); \ + return NULL; \ + } struct buffer { - void *start; - size_t length; + void *start; + size_t length; }; typedef struct { - PyObject_HEAD - int fd; - struct buffer *buffers; - int buffer_count; + PyObject_HEAD + int fd; + struct buffer *buffers; + int buffer_count; } Video_device; typedef struct { - PyObject_HEAD - std::map fd; - std::map buffers; - std::map buffer_count; + PyObject_HEAD + std::map fd; + std::map buffers; + std::map buffer_count; } Device_manager; struct capability { - int id; - const char *name; + int id; + const char *name; }; static struct capability capabilities[] = { - { V4L2_CAP_ASYNCIO, "asyncio" }, - { V4L2_CAP_AUDIO, "audio" }, - { V4L2_CAP_HW_FREQ_SEEK, "hw_freq_seek" }, - { V4L2_CAP_RADIO, "radio" }, - { V4L2_CAP_RDS_CAPTURE, "rds_capture" }, - { V4L2_CAP_READWRITE, "readwrite" }, - { V4L2_CAP_SLICED_VBI_CAPTURE, "sliced_vbi_capture" }, - { V4L2_CAP_SLICED_VBI_OUTPUT, "sliced_vbi_output" }, - { V4L2_CAP_STREAMING, "streaming" }, - { V4L2_CAP_TUNER, "tuner" }, - { V4L2_CAP_VBI_CAPTURE, "vbi_capture" }, - { V4L2_CAP_VBI_OUTPUT, "vbi_output" }, - { V4L2_CAP_VIDEO_CAPTURE, "video_capture" }, - { V4L2_CAP_VIDEO_OUTPUT, "video_output" }, - { V4L2_CAP_VIDEO_OUTPUT_OVERLAY, "video_output_overlay" }, - { V4L2_CAP_VIDEO_OVERLAY, "video_overlay" } + { V4L2_CAP_ASYNCIO, "asyncio" }, + { V4L2_CAP_AUDIO, "audio" }, + { V4L2_CAP_HW_FREQ_SEEK, "hw_freq_seek" }, + { V4L2_CAP_RADIO, "radio" }, + { V4L2_CAP_RDS_CAPTURE, "rds_capture" }, + { V4L2_CAP_READWRITE, "readwrite" }, + { V4L2_CAP_SLICED_VBI_CAPTURE, "sliced_vbi_capture" }, + { V4L2_CAP_SLICED_VBI_OUTPUT, "sliced_vbi_output" }, + { V4L2_CAP_STREAMING, "streaming" }, + { V4L2_CAP_TUNER, "tuner" }, + { V4L2_CAP_VBI_CAPTURE, "vbi_capture" }, + { V4L2_CAP_VBI_OUTPUT, "vbi_output" }, + { V4L2_CAP_VIDEO_CAPTURE, "video_capture" }, + { V4L2_CAP_VIDEO_OUTPUT, "video_output" }, + { V4L2_CAP_VIDEO_OUTPUT_OVERLAY, "video_output_overlay" }, + { V4L2_CAP_VIDEO_OVERLAY, "video_overlay" } }; static int my_ioctl(int fd, int request, void *arg) { - // Retry ioctl until it returns without being 
interrupted. + // Retry ioctl until it returns without being interrupted. - for(;;) - { - int result = v4l2_ioctl(fd, request, arg); + for(;;) + { + int result = v4l2_ioctl(fd, request, arg); - if(!result) + if(!result) { - return 0; + return 0; } - if(errno != EINTR) + if(errno != EINTR) { - PyErr_SetFromErrno(PyExc_IOError); - return 1; + PyErr_SetFromErrno(PyExc_IOError); + return 1; } - } + } } static void Video_device_unmap(Video_device *self) { - int i; + int i; - for(i = 0; i < self->buffer_count; i++) - { - v4l2_munmap(self->buffers[i].start, self->buffers[i].length); - } + for(i = 0; i < self->buffer_count; i++) + { + v4l2_munmap(self->buffers[i].start, self->buffers[i].length); + } } static void Video_device_dealloc(Video_device *self) { - if(self->fd >= 0) - { - if(self->buffers) + if(self->fd >= 0) + { + if(self->buffers) { - Video_device_unmap(self); + Video_device_unmap(self); } - v4l2_close(self->fd); - } + v4l2_close(self->fd); + } - self->ob_type->tp_free((PyObject *)self); + self->ob_type->tp_free((PyObject *)self); } static int Video_device_init(Video_device *self, PyObject *args, - PyObject *kwargs) + PyObject *kwargs) { - const char *device_path; + const char *device_path; - if(!PyArg_ParseTuple(args, "s", &device_path)) - { - return -1; - } + if(!PyArg_ParseTuple(args, "s", &device_path)) + { + return -1; + } - int fd = v4l2_open(device_path, O_RDWR | O_NONBLOCK); + int fd = v4l2_open(device_path, O_RDWR | O_NONBLOCK); - if(fd < 0) - { - PyErr_SetFromErrnoWithFilename(PyExc_IOError, (char *)device_path); - return -1; - } + if(fd < 0) + { + PyErr_SetFromErrnoWithFilename(PyExc_IOError, (char *)device_path); + return -1; + } - self->fd = fd; - self->buffers = NULL; - return 0; + self->fd = fd; + self->buffers = NULL; + return 0; } static PyObject *Video_device_close(Video_device *self) { - if(self->fd >= 0) - { - if(self->buffers) + if(self->fd >= 0) + { + if(self->buffers) { - Video_device_unmap(self); + Video_device_unmap(self); } - v4l2_close(self->fd); - self->fd = -1; - } + v4l2_close(self->fd); + self->fd = -1; + } - Py_RETURN_NONE; + Py_RETURN_NONE; } static PyObject *Video_device_fileno(Video_device *self) { - ASSERT_OPEN; - return PyInt_FromLong(self->fd); + ASSERT_OPEN; + return PyInt_FromLong(self->fd); } static PyObject *Video_device_get_info(Video_device *self) { - ASSERT_OPEN; - struct v4l2_capability caps; + ASSERT_OPEN; + struct v4l2_capability caps; - if(my_ioctl(self->fd, VIDIOC_QUERYCAP, &caps)) - { - return NULL; - } + if(my_ioctl(self->fd, VIDIOC_QUERYCAP, &caps)) + { + return NULL; + } - PyObject *set = PySet_New(NULL); + PyObject *set = PySet_New(NULL); - if(!set) - { - return NULL; - } + if(!set) + { + return NULL; + } - struct capability *capability = capabilities; + struct capability *capability = capabilities; - while(capability < (struct capability *)(capabilities + sizeof(capabilities))) - { - if(caps.capabilities & capability->id) + while(capability < (struct capability *)(capabilities + sizeof(capabilities))) + { + if(caps.capabilities & capability->id) { - PyObject *s = PyString_FromString(capability->name); + PyObject *s = PyString_FromString(capability->name); - if(!s) - { - Py_DECREF(set); - return NULL; - } + if(!s) + { + Py_DECREF(set); + return NULL; + } - PySet_Add(set, s); + PySet_Add(set, s); } - capability++; - } + capability++; + } - return Py_BuildValue("sssO", caps.driver, caps.card, caps.bus_info, set); + return Py_BuildValue("sssO", caps.driver, caps.card, caps.bus_info, set); } static PyObject 
*Video_device_set_format(Video_device *self, PyObject *args) { - int size_x; - int size_y; - const char *fmt = NULL; - - if(!PyArg_ParseTuple(args, "ii|s", &size_x, &size_y, &fmt)) - { - return NULL; - } - - struct v4l2_format format; - format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; - format.fmt.pix.width = size_x; - format.fmt.pix.height = size_y; + int size_x; + int size_y; + const char *fmt = NULL; + + if(!PyArg_ParseTuple(args, "ii|s", &size_x, &size_y, &fmt)) + { + return NULL; + } + + struct v4l2_format format; + format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + format.fmt.pix.width = size_x; + format.fmt.pix.height = size_y; #ifdef USE_LIBV4L - format.fmt.pix.pixelformat = V4L2_PIX_FMT_RGB24; - if(fmt != NULL && strcmp(fmt, "MJPEG")==0) - format.fmt.pix.pixelformat = V4L2_PIX_FMT_MJPEG; - if(fmt != NULL && strcmp(fmt, "RGB24")==0) - format.fmt.pix.pixelformat = V4L2_PIX_FMT_RGB24; - if(fmt != NULL && strcmp(fmt, "YUV420")==0) - format.fmt.pix.pixelformat = V4L2_PIX_FMT_YUV420; + format.fmt.pix.pixelformat = V4L2_PIX_FMT_RGB24; + if(fmt != NULL && strcmp(fmt, "MJPEG")==0) + format.fmt.pix.pixelformat = V4L2_PIX_FMT_MJPEG; + if(fmt != NULL && strcmp(fmt, "RGB24")==0) + format.fmt.pix.pixelformat = V4L2_PIX_FMT_RGB24; + if(fmt != NULL && strcmp(fmt, "YUV420")==0) + format.fmt.pix.pixelformat = V4L2_PIX_FMT_YUV420; #else - format.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV; + format.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV; #endif - format.fmt.pix.field = V4L2_FIELD_NONE; - format.fmt.pix.bytesperline = 0; + format.fmt.pix.field = V4L2_FIELD_NONE; + format.fmt.pix.bytesperline = 0; - if(my_ioctl(self->fd, VIDIOC_S_FMT, &format)) - { - return NULL; - } + if(my_ioctl(self->fd, VIDIOC_S_FMT, &format)) + { + return NULL; + } - return Py_BuildValue("ii", format.fmt.pix.width, format.fmt.pix.height); + return Py_BuildValue("ii", format.fmt.pix.width, format.fmt.pix.height); } static PyObject *Video_device_set_fps(Video_device *self, PyObject *args) { - int fps; - if(!PyArg_ParseTuple(args, "i", &fps)) - { - return NULL; - } - struct v4l2_streamparm setfps; - memset(&setfps, 0, sizeof(struct v4l2_streamparm)); - setfps.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; - setfps.parm.capture.timeperframe.numerator = 1; - setfps.parm.capture.timeperframe.denominator = fps; - if(my_ioctl(self->fd, VIDIOC_S_PARM, &setfps)){ - return NULL; - } - return Py_BuildValue("i",setfps.parm.capture.timeperframe.denominator); + int fps; + if(!PyArg_ParseTuple(args, "i", &fps)) + { + return NULL; + } + struct v4l2_streamparm setfps; + memset(&setfps, 0, sizeof(struct v4l2_streamparm)); + setfps.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + setfps.parm.capture.timeperframe.numerator = 1; + setfps.parm.capture.timeperframe.denominator = fps; + if(my_ioctl(self->fd, VIDIOC_S_PARM, &setfps)){ + return NULL; + } + return Py_BuildValue("i",setfps.parm.capture.timeperframe.denominator); } static PyObject *Video_device_get_format(Video_device *self) @@ -307,238 +307,238 @@ static PyObject *Video_device_get_format(Video_device *self) static PyObject *Video_device_start(Video_device *self) { - ASSERT_OPEN; - enum v4l2_buf_type type; - type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + ASSERT_OPEN; + enum v4l2_buf_type type; + type = V4L2_BUF_TYPE_VIDEO_CAPTURE; - if(my_ioctl(self->fd, VIDIOC_STREAMON, &type)) - { - return NULL; - } + if(my_ioctl(self->fd, VIDIOC_STREAMON, &type)) + { + return NULL; + } - Py_RETURN_NONE; + Py_RETURN_NONE; } static PyObject *Video_device_stop(Video_device *self) { - ASSERT_OPEN; - enum v4l2_buf_type type; - type = 
V4L2_BUF_TYPE_VIDEO_CAPTURE; + ASSERT_OPEN; + enum v4l2_buf_type type; + type = V4L2_BUF_TYPE_VIDEO_CAPTURE; - if(my_ioctl(self->fd, VIDIOC_STREAMOFF, &type)) - { - return NULL; - } + if(my_ioctl(self->fd, VIDIOC_STREAMOFF, &type)) + { + return NULL; + } - Py_RETURN_NONE; + Py_RETURN_NONE; } static PyObject *Video_device_create_buffers(Video_device *self, PyObject *args) { - int buffer_count; - - if(!PyArg_ParseTuple(args, "I", &buffer_count)) - { - return NULL; - } - - ASSERT_OPEN; - - if(self->buffers) - { - PyErr_SetString(PyExc_ValueError, "Buffers are already created"); - return NULL; - } - - struct v4l2_requestbuffers reqbuf; - reqbuf.count = buffer_count; - reqbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; - reqbuf.memory = V4L2_MEMORY_MMAP; - - if(my_ioctl(self->fd, VIDIOC_REQBUFS, &reqbuf)) - { - return NULL; - } - - if(!reqbuf.count) - { - PyErr_SetString(PyExc_IOError, "Not enough buffer memory"); - return NULL; - } - - self->buffers = (struct buffer *)malloc(reqbuf.count * sizeof(struct buffer)); - - if(!self->buffers) - { - PyErr_NoMemory(); - return NULL; - } - - unsigned int i; - - for(i = 0; i < reqbuf.count; i++) - { - struct v4l2_buffer buffer; - buffer.index = i; - buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; - buffer.memory = V4L2_MEMORY_MMAP; - - if(my_ioctl(self->fd, VIDIOC_QUERYBUF, &buffer)) + int buffer_count; + + if(!PyArg_ParseTuple(args, "I", &buffer_count)) + { + return NULL; + } + + ASSERT_OPEN; + + if(self->buffers) + { + PyErr_SetString(PyExc_ValueError, "Buffers are already created"); + return NULL; + } + + struct v4l2_requestbuffers reqbuf; + reqbuf.count = buffer_count; + reqbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + reqbuf.memory = V4L2_MEMORY_MMAP; + + if(my_ioctl(self->fd, VIDIOC_REQBUFS, &reqbuf)) + { + return NULL; + } + + if(!reqbuf.count) + { + PyErr_SetString(PyExc_IOError, "Not enough buffer memory"); + return NULL; + } + + self->buffers = (struct buffer *)malloc(reqbuf.count * sizeof(struct buffer)); + + if(!self->buffers) + { + PyErr_NoMemory(); + return NULL; + } + + unsigned int i; + + for(i = 0; i < reqbuf.count; i++) + { + struct v4l2_buffer buffer; + buffer.index = i; + buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + buffer.memory = V4L2_MEMORY_MMAP; + + if(my_ioctl(self->fd, VIDIOC_QUERYBUF, &buffer)) { - return NULL; + return NULL; } - self->buffers[i].length = buffer.length; - self->buffers[i].start = v4l2_mmap(NULL, buffer.length, - PROT_READ | PROT_WRITE, MAP_SHARED, self->fd, buffer.m.offset); + self->buffers[i].length = buffer.length; + self->buffers[i].start = v4l2_mmap(NULL, buffer.length, + PROT_READ | PROT_WRITE, MAP_SHARED, self->fd, buffer.m.offset); - if(self->buffers[i].start == MAP_FAILED) + if(self->buffers[i].start == MAP_FAILED) { - PyErr_SetFromErrno(PyExc_IOError); - return NULL; + PyErr_SetFromErrno(PyExc_IOError); + return NULL; } - } + } - self->buffer_count = i; - Py_RETURN_NONE; + self->buffer_count = i; + Py_RETURN_NONE; } static PyObject *Video_device_queue_all_buffers(Video_device *self) { - if(!self->buffers) - { - ASSERT_OPEN; - PyErr_SetString(PyExc_ValueError, "Buffers have not been created"); - return NULL; - } - - int i; - int buffer_count = self->buffer_count; - - for(i = 0; i < buffer_count; i++) - { - struct v4l2_buffer buffer; - buffer.index = i; - buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; - buffer.memory = V4L2_MEMORY_MMAP; - - if(my_ioctl(self->fd, VIDIOC_QBUF, &buffer)) + if(!self->buffers) + { + ASSERT_OPEN; + PyErr_SetString(PyExc_ValueError, "Buffers have not been created"); + return NULL; + } + + int i; + 
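+		/* The loop below hands each mmapped buffer to the driver with VIDIOC_QBUF,
+		   so the capture queue starts full; select() can then report when the
+		   device has filled a buffer and 'read' can dequeue it. */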
int buffer_count = self->buffer_count; + + for(i = 0; i < buffer_count; i++) + { + struct v4l2_buffer buffer; + buffer.index = i; + buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + buffer.memory = V4L2_MEMORY_MMAP; + + if(my_ioctl(self->fd, VIDIOC_QBUF, &buffer)) { - return NULL; + return NULL; } - } + } - Py_RETURN_NONE; + Py_RETURN_NONE; } static PyObject *Video_device_read_internal(Video_device *self, int queue, int return_timestamp) { - if(!self->buffers) - { - ASSERT_OPEN; - PyErr_SetString(PyExc_ValueError, "Buffers have not been created"); - return NULL; - } - - struct v4l2_buffer buffer; - buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; - buffer.memory = V4L2_MEMORY_MMAP; - - if(my_ioctl(self->fd, VIDIOC_DQBUF, &buffer)) - { - return NULL; - } + if(!self->buffers) + { + ASSERT_OPEN; + PyErr_SetString(PyExc_ValueError, "Buffers have not been created"); + return NULL; + } + + struct v4l2_buffer buffer; + buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + buffer.memory = V4L2_MEMORY_MMAP; + + if(my_ioctl(self->fd, VIDIOC_DQBUF, &buffer)) + { + return NULL; + } #ifdef USE_LIBV4L - PyObject *result = PyString_FromStringAndSize( - (const char*)self->buffers[buffer.index].start, buffer.bytesused); + PyObject *result = PyString_FromStringAndSize( + (const char*)self->buffers[buffer.index].start, buffer.bytesused); - if(!result) - { - return NULL; - } + if(!result) + { + return NULL; + } #else - // Convert buffer from YUYV to RGB. - // For the byte order, see: http://v4l2spec.bytesex.org/spec/r4339.htm - // For the color conversion, see: http://v4l2spec.bytesex.org/spec/x2123.htm - int length = buffer.bytesused * 6 / 4; - PyObject *result = PyString_FromStringAndSize(NULL, length); + // Convert buffer from YUYV to RGB. + // For the byte order, see: http://v4l2spec.bytesex.org/spec/r4339.htm + // For the color conversion, see: http://v4l2spec.bytesex.org/spec/x2123.htm + int length = buffer.bytesused * 6 / 4; + PyObject *result = PyString_FromStringAndSize(NULL, length); - if(!result) - { - return NULL; - } + if(!result) + { + return NULL; + } - char *rgb = PyString_AS_STRING(result); - char *rgb_max = rgb + length; - unsigned char *yuyv = self->buffers[buffer.index].start; + char *rgb = PyString_AS_STRING(result); + char *rgb_max = rgb + length; + unsigned char *yuyv = self->buffers[buffer.index].start; #define CLAMP(c) ((c) <= 0 ? 0 : (c) >= 65025 ? 
255 : (c) >> 8) - while(rgb < rgb_max) - { - int u = yuyv[1] - 128; - int v = yuyv[3] - 128; - int uv = 100 * u + 208 * v; - u *= 516; - v *= 409; - - int y = 298 * (yuyv[0] - 16); - rgb[0] = CLAMP(y + v); - rgb[1] = CLAMP(y - uv); - rgb[2] = CLAMP(y + u); - - y = 298 * (yuyv[2] - 16); - rgb[3] = CLAMP(y + v); - rgb[4] = CLAMP(y - uv); - rgb[5] = CLAMP(y + u); - - rgb += 6; - yuyv += 4; - } + while(rgb < rgb_max) + { + int u = yuyv[1] - 128; + int v = yuyv[3] - 128; + int uv = 100 * u + 208 * v; + u *= 516; + v *= 409; + + int y = 298 * (yuyv[0] - 16); + rgb[0] = CLAMP(y + v); + rgb[1] = CLAMP(y - uv); + rgb[2] = CLAMP(y + u); + + y = 298 * (yuyv[2] - 16); + rgb[3] = CLAMP(y + v); + rgb[4] = CLAMP(y - uv); + rgb[5] = CLAMP(y + u); + + rgb += 6; + yuyv += 4; + } #undef CLAMP #endif - PyObject *out = result; + PyObject *out = result; - if(return_timestamp) - { - out = PyTuple_New(4); - PyTuple_SetItem(out, 0, result); - PyTuple_SetItem(out, 1, PyInt_FromLong(buffer.timestamp.tv_sec)); - PyTuple_SetItem(out, 2, PyInt_FromLong(buffer.timestamp.tv_usec)); - PyTuple_SetItem(out, 3, PyInt_FromLong(buffer.sequence)); - } + if(return_timestamp) + { + out = PyTuple_New(4); + PyTuple_SetItem(out, 0, result); + PyTuple_SetItem(out, 1, PyInt_FromLong(buffer.timestamp.tv_sec)); + PyTuple_SetItem(out, 2, PyInt_FromLong(buffer.timestamp.tv_usec)); + PyTuple_SetItem(out, 3, PyInt_FromLong(buffer.sequence)); + } - if(queue && my_ioctl(self->fd, VIDIOC_QBUF, &buffer)) - { - return NULL; - } + if(queue && my_ioctl(self->fd, VIDIOC_QBUF, &buffer)) + { + return NULL; + } - return out; + return out; } static PyObject *Video_device_read(Video_device *self, PyObject *args) { - int return_timestamp=0; + int return_timestamp=0; - if(!PyArg_ParseTuple(args, "|i", &return_timestamp)) - { - return NULL; - } + if(!PyArg_ParseTuple(args, "|i", &return_timestamp)) + { + return NULL; + } - return Video_device_read_internal(self, 0, return_timestamp); + return Video_device_read_internal(self, 0, return_timestamp); } static PyObject *Video_device_read_and_queue(Video_device *self, PyObject *args) { - int return_timestamp=0; + int return_timestamp=0; - if(!PyArg_ParseTuple(args, "|i", &return_timestamp)) - { - return NULL; - } + if(!PyArg_ParseTuple(args, "|i", &return_timestamp)) + { + return NULL; + } - return Video_device_read_internal(self, 1, return_timestamp); + return Video_device_read_internal(self, 1, return_timestamp); } // ********************************************************************* @@ -692,7 +692,7 @@ static PyObject *InsertHuffmanTable(PyObject *self, PyObject *args) { PyErr_BadArgument(); PyErr_Format(PyExc_TypeError, "Function requires 1 argument"); - return NULL; + Py_RETURN_NONE; } PyObject *inBuffer = PyTuple_GetItem(args, 0); @@ -705,7 +705,7 @@ static PyObject *InsertHuffmanTable(PyObject *self, PyObject *args) //PyObject_Print(type, stdout, Py_PRINT_RAW); //Py_CLEAR(type); - return NULL; + Py_RETURN_NONE; } int parsing = 1; @@ -771,13 +771,13 @@ static void Device_manager_dealloc(Device_manager *self) } static int Device_manager_init(Device_manager *self, PyObject *args, - PyObject *kwargs) + PyObject *kwargs) { return 0; } static PyObject *Device_manager_Start(Device_manager *self, PyObject *args) -// self, dev = None, reqSize=(640, 480), reqFps = 30, fmt = "MJPEG"): +// self, dev = None, reqSize=(640, 480), reqFps = 30, fmt = "MJPEG", buffer_count = 10): { //Process arguments const char *devarg = NULL; @@ -791,11 +791,22 @@ static PyObject *Device_manager_Start(Device_manager *self, PyObject 
*args) devarg = "/dev/video0"; } + long buffer_count = 10; + if(PyTuple_Size(args) >= 4) + { + PyObject *pybufferarg = PyTuple_GetItem(args, 4); + buffer_count = PyInt_AsLong(pybufferarg); + } + + //Check this device has not already been start + std::map::iterator it = self->fd.find(devarg); + if(it!=self->fd.end()) + { + PyErr_Format(PyExc_RuntimeError, "Device already started."); + Py_RETURN_NONE; + } + //Open the video device. - /*PyObject *arglist = Py_BuildValue("(s)", devarg); - PyObject *obj = PyObject_CallObject((PyObject *) &Device_manager_type, arglist); - Py_DECREF(arglist); - Py_DECREF(obj);*/ int fd = v4l2_open(devarg, O_RDWR | O_NONBLOCK); if(fd < 0) @@ -804,8 +815,11 @@ static PyObject *Device_manager_Start(Device_manager *self, PyObject *args) Py_RETURN_NONE; } - //self->fd = fd; - //self->buffers = NULL; + self->fd[devarg] = fd; + self->buffers[devarg] = NULL; + + //Set other parameters for capture + //TODO /* //Suggest an image size to the device. The device may choose and @@ -817,18 +831,70 @@ static PyObject *Device_manager_Start(Device_manager *self, PyObject *args) //Set target frames per second self.fps = self.video.set_fps(reqFps) + */ // Create a buffer to store image data in. This must be done before // calling 'start' if v4l2capture is compiled with libv4l2. Otherwise // raises IOError. - self.video.create_buffers(10) + + struct v4l2_requestbuffers reqbuf; + reqbuf.count = buffer_count; + reqbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + reqbuf.memory = V4L2_MEMORY_MMAP; + + if(my_ioctl(fd, VIDIOC_REQBUFS, &reqbuf)) + { + return NULL; + } + + if(!reqbuf.count) + { + PyErr_SetString(PyExc_IOError, "Not enough buffer memory"); + return NULL; + } + + struct buffer *buffs = (struct buffer *)malloc(reqbuf.count * sizeof(struct buffer)); + self->buffers[devarg] = buffs; + + if(!self->buffers[devarg]) + { + PyErr_NoMemory(); + return NULL; + } + + unsigned int i; + + for(i = 0; i < reqbuf.count; i++) + { + struct v4l2_buffer buffer; + buffer.index = i; + buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + buffer.memory = V4L2_MEMORY_MMAP; + + if(my_ioctl(fd, VIDIOC_QUERYBUF, &buffer)) + { + return NULL; + } + + buffs[i].length = buffer.length; + buffs[i].start = v4l2_mmap(NULL, buffer.length, + PROT_READ | PROT_WRITE, MAP_SHARED, fd, buffer.m.offset); + + if(buffs[i].start == MAP_FAILED) + { + PyErr_SetFromErrno(PyExc_IOError); + return NULL; + } + } + + self->buffer_count[devarg] = i; // Send the buffer to the device. Some devices require this to be done // before calling 'start'. - self.video.queue_all_buffers() + //self.video.queue_all_buffers() // Start the device. This lights the LED if it's a camera that has one. - self.video.start()*/ + //self.video.start()*/ Py_RETURN_NONE; } @@ -836,87 +902,87 @@ static PyObject *Device_manager_Start(Device_manager *self, PyObject *args) // ********************************************************************* static PyMethodDef Video_device_methods[] = { - {"close", (PyCFunction)Video_device_close, METH_NOARGS, - "close()\n\n" - "Close video device. 
Subsequent calls to other methods will fail."}, - {"fileno", (PyCFunction)Video_device_fileno, METH_NOARGS, - "fileno() -> integer \"file descriptor\".\n\n" - "This enables video devices to be passed select.select for waiting " - "until a frame is available for reading."}, - {"get_info", (PyCFunction)Video_device_get_info, METH_NOARGS, - "get_info() -> driver, card, bus_info, capabilities\n\n" - "Returns three strings with information about the video device, and one " - "set containing strings identifying the capabilities of the video " - "device."}, - {"set_format", (PyCFunction)Video_device_set_format, METH_VARARGS, - "set_format(size_x, size_y, pixel_format='RGB24') -> size_x, size_y\n\n" - "Request the video device to set image size and format. The device may " - "choose another size than requested and will return its choice. The " - "pixel format may be either RGB24, YUV420 or MJPEG."}, - {"get_format", (PyCFunction)Video_device_get_format, METH_NOARGS, - "get_format() -> size_x, size_y\n\n"}, - {"set_fps", (PyCFunction)Video_device_set_fps, METH_VARARGS, - "set_fps(fps) -> fps \n\n" - "Request the video device to set frame per seconds.The device may " - "choose another frame rate than requested and will return its choice. " }, - {"start", (PyCFunction)Video_device_start, METH_NOARGS, - "start()\n\n" - "Start video capture."}, - {"stop", (PyCFunction)Video_device_stop, METH_NOARGS, - "stop()\n\n" - "Stop video capture."}, - {"create_buffers", (PyCFunction)Video_device_create_buffers, METH_VARARGS, - "create_buffers(count)\n\n" - "Create buffers used for capturing image data. Can only be called once " - "for each video device object."}, - {"queue_all_buffers", (PyCFunction)Video_device_queue_all_buffers, - METH_NOARGS, - "queue_all_buffers()\n\n" - "Let the video device fill all buffers created."}, - {"read", (PyCFunction)Video_device_read, METH_VARARGS, - "read(get_timestamp) -> string or tuple\n\n" - "Reads image data from a buffer that has been filled by the video " - "device. The image data is in RGB24, YUV420 or MJPEG format as decided by " - "'set_format'. The buffer is removed from the queue. Fails if no buffer " - "is filled. Use select.select to check for filled buffers. If " - "get_timestamp is true, a tuple is turned containing (sec, microsec, " - "sequence number)"}, - {"read_and_queue", (PyCFunction)Video_device_read_and_queue, METH_VARARGS, - "read_and_queue(get_timestamp)\n\n" - "Same as 'read', but adds the buffer back to the queue so the video " - "device can fill it again."}, - {NULL} + {"close", (PyCFunction)Video_device_close, METH_NOARGS, + "close()\n\n" + "Close video device. Subsequent calls to other methods will fail."}, + {"fileno", (PyCFunction)Video_device_fileno, METH_NOARGS, + "fileno() -> integer \"file descriptor\".\n\n" + "This enables video devices to be passed select.select for waiting " + "until a frame is available for reading."}, + {"get_info", (PyCFunction)Video_device_get_info, METH_NOARGS, + "get_info() -> driver, card, bus_info, capabilities\n\n" + "Returns three strings with information about the video device, and one " + "set containing strings identifying the capabilities of the video " + "device."}, + {"set_format", (PyCFunction)Video_device_set_format, METH_VARARGS, + "set_format(size_x, size_y, pixel_format='RGB24') -> size_x, size_y\n\n" + "Request the video device to set image size and format. The device may " + "choose another size than requested and will return its choice. 
The " + "pixel format may be either RGB24, YUV420 or MJPEG."}, + {"get_format", (PyCFunction)Video_device_get_format, METH_NOARGS, + "get_format() -> size_x, size_y\n\n"}, + {"set_fps", (PyCFunction)Video_device_set_fps, METH_VARARGS, + "set_fps(fps) -> fps \n\n" + "Request the video device to set frame per seconds.The device may " + "choose another frame rate than requested and will return its choice. " }, + {"start", (PyCFunction)Video_device_start, METH_NOARGS, + "start()\n\n" + "Start video capture."}, + {"stop", (PyCFunction)Video_device_stop, METH_NOARGS, + "stop()\n\n" + "Stop video capture."}, + {"create_buffers", (PyCFunction)Video_device_create_buffers, METH_VARARGS, + "create_buffers(count)\n\n" + "Create buffers used for capturing image data. Can only be called once " + "for each video device object."}, + {"queue_all_buffers", (PyCFunction)Video_device_queue_all_buffers, + METH_NOARGS, + "queue_all_buffers()\n\n" + "Let the video device fill all buffers created."}, + {"read", (PyCFunction)Video_device_read, METH_VARARGS, + "read(get_timestamp) -> string or tuple\n\n" + "Reads image data from a buffer that has been filled by the video " + "device. The image data is in RGB24, YUV420 or MJPEG format as decided by " + "'set_format'. The buffer is removed from the queue. Fails if no buffer " + "is filled. Use select.select to check for filled buffers. If " + "get_timestamp is true, a tuple is turned containing (sec, microsec, " + "sequence number)"}, + {"read_and_queue", (PyCFunction)Video_device_read_and_queue, METH_VARARGS, + "read_and_queue(get_timestamp)\n\n" + "Same as 'read', but adds the buffer back to the queue so the video " + "device can fill it again."}, + {NULL} }; static PyTypeObject Video_device_type = { - PyObject_HEAD_INIT(NULL) - 0, "v4l2capture.Video_device", sizeof(Video_device), 0, - (destructor)Video_device_dealloc, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, Py_TPFLAGS_DEFAULT, "Video_device(path)\n\nOpens the video device at " - "the given path and returns an object that can capture images. The " - "constructor and all methods except close may raise IOError.", 0, 0, 0, - 0, 0, 0, Video_device_methods, 0, 0, 0, 0, 0, 0, 0, - (initproc)Video_device_init + PyObject_HEAD_INIT(NULL) + 0, "v4l2capture.Video_device", sizeof(Video_device), 0, + (destructor)Video_device_dealloc, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, Py_TPFLAGS_DEFAULT, "Video_device(path)\n\nOpens the video device at " + "the given path and returns an object that can capture images. The " + "constructor and all methods except close may raise IOError.", 0, 0, 0, + 0, 0, 0, Video_device_methods, 0, 0, 0, 0, 0, 0, 0, + (initproc)Video_device_init }; // ********************************************************************* static PyMethodDef Device_manager_methods[] = { - {"start", (PyCFunction)Device_manager_Start, METH_VARARGS, - "start()\n\n" - "Start video capture."}, - {NULL} + {"start", (PyCFunction)Device_manager_Start, METH_VARARGS, + "start()\n\n" + "Start video capture."}, + {NULL} }; static PyTypeObject Device_manager_type = { - PyObject_HEAD_INIT(NULL) - 0, "v4l2capture.Device_manager", sizeof(Device_manager), 0, - (destructor)Device_manager_dealloc, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, Py_TPFLAGS_DEFAULT, "Video_device(path)\n\nOpens the video device at " - "the given path and returns an object that can capture images. 
The " - "constructor and all methods except close may raise IOError.", 0, 0, 0, - 0, 0, 0, Device_manager_methods, 0, 0, 0, 0, 0, 0, 0, - (initproc)Device_manager_init + PyObject_HEAD_INIT(NULL) + 0, "v4l2capture.Device_manager", sizeof(Device_manager), 0, + (destructor)Device_manager_dealloc, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, Py_TPFLAGS_DEFAULT, "Video_device(path)\n\nOpens the video device at " + "the given path and returns an object that can capture images. The " + "constructor and all methods except close may raise IOError.", 0, 0, 0, + 0, 0, 0, Device_manager_methods, 0, 0, 0, 0, 0, 0, 0, + (initproc)Device_manager_init }; // ********************************************************************* @@ -928,30 +994,30 @@ static PyMethodDef module_methods[] = { PyMODINIT_FUNC initv4l2capture(void) { - Video_device_type.tp_new = PyType_GenericNew; - Device_manager_type.tp_new = PyType_GenericNew; - - if(PyType_Ready(&Video_device_type) < 0) - { - return; - } - - if(PyType_Ready(&Device_manager_type) < 0) - { - return; - } - - PyObject *module = Py_InitModule3("v4l2capture", module_methods, - "Capture video with video4linux2."); - - if(!module) - { - return; - } - - Py_INCREF(&Video_device_type); - PyModule_AddObject(module, "Video_device", (PyObject *)&Video_device_type); - Py_INCREF(&Device_manager_type); - PyModule_AddObject(module, "Device_manager", (PyObject *)&Device_manager_type); + Video_device_type.tp_new = PyType_GenericNew; + Device_manager_type.tp_new = PyType_GenericNew; + + if(PyType_Ready(&Video_device_type) < 0) + { + return; + } + + if(PyType_Ready(&Device_manager_type) < 0) + { + return; + } + + PyObject *module = Py_InitModule3("v4l2capture", module_methods, + "Capture video with video4linux2."); + + if(!module) + { + return; + } + + Py_INCREF(&Video_device_type); + PyModule_AddObject(module, "Video_device", (PyObject *)&Video_device_type); + Py_INCREF(&Device_manager_type); + PyModule_AddObject(module, "Device_manager", (PyObject *)&Device_manager_type); } From d0d944b449cb54a6ee3b6b3be10aac679c4d2e1b Mon Sep 17 00:00:00 2001 From: Tim Sheerman-Chase Date: Sun, 20 Oct 2013 03:46:14 +0100 Subject: [PATCH 012/256] Start and stop functions but it segfaults --- v4l2capture.cpp | 132 +++++++++++++++++++++++++++++++++--------------- 1 file changed, 92 insertions(+), 40 deletions(-) diff --git a/v4l2capture.cpp b/v4l2capture.cpp index 823d757..a8eb088 100644 --- a/v4l2capture.cpp +++ b/v4l2capture.cpp @@ -35,7 +35,7 @@ { \ PyErr_SetString(PyExc_ValueError, \ "I/O operation on closed file"); \ - return NULL; \ + Py_RETURN_NONE; \ } struct buffer { @@ -179,14 +179,14 @@ static PyObject *Video_device_get_info(Video_device *self) if(my_ioctl(self->fd, VIDIOC_QUERYCAP, &caps)) { - return NULL; + Py_RETURN_NONE; } PyObject *set = PySet_New(NULL); if(!set) { - return NULL; + Py_RETURN_NONE; } struct capability *capability = capabilities; @@ -200,7 +200,7 @@ static PyObject *Video_device_get_info(Video_device *self) if(!s) { Py_DECREF(set); - return NULL; + Py_RETURN_NONE; } PySet_Add(set, s); @@ -220,7 +220,7 @@ static PyObject *Video_device_set_format(Video_device *self, PyObject *args) if(!PyArg_ParseTuple(args, "ii|s", &size_x, &size_y, &fmt)) { - return NULL; + Py_RETURN_NONE; } struct v4l2_format format; @@ -243,7 +243,7 @@ static PyObject *Video_device_set_format(Video_device *self, PyObject *args) if(my_ioctl(self->fd, VIDIOC_S_FMT, &format)) { - return NULL; + Py_RETURN_NONE; } return Py_BuildValue("ii", format.fmt.pix.width, format.fmt.pix.height); @@ -254,7 +254,7 @@ 
static PyObject *Video_device_set_fps(Video_device *self, PyObject *args) int fps; if(!PyArg_ParseTuple(args, "i", &fps)) { - return NULL; + Py_RETURN_NONE; } struct v4l2_streamparm setfps; memset(&setfps, 0, sizeof(struct v4l2_streamparm)); @@ -262,7 +262,7 @@ static PyObject *Video_device_set_fps(Video_device *self, PyObject *args) setfps.parm.capture.timeperframe.numerator = 1; setfps.parm.capture.timeperframe.denominator = fps; if(my_ioctl(self->fd, VIDIOC_S_PARM, &setfps)){ - return NULL; + Py_RETURN_NONE; } return Py_BuildValue("i",setfps.parm.capture.timeperframe.denominator); } @@ -274,7 +274,7 @@ static PyObject *Video_device_get_format(Video_device *self) format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; if(my_ioctl(self->fd, VIDIOC_G_FMT, &format)) { - return NULL; + Py_RETURN_NONE; } PyObject *out = PyTuple_New(3); @@ -313,7 +313,7 @@ static PyObject *Video_device_start(Video_device *self) if(my_ioctl(self->fd, VIDIOC_STREAMON, &type)) { - return NULL; + Py_RETURN_NONE; } Py_RETURN_NONE; @@ -327,7 +327,7 @@ static PyObject *Video_device_stop(Video_device *self) if(my_ioctl(self->fd, VIDIOC_STREAMOFF, &type)) { - return NULL; + Py_RETURN_NONE; } Py_RETURN_NONE; @@ -339,7 +339,7 @@ static PyObject *Video_device_create_buffers(Video_device *self, PyObject *args) if(!PyArg_ParseTuple(args, "I", &buffer_count)) { - return NULL; + Py_RETURN_NONE; } ASSERT_OPEN; @@ -347,7 +347,7 @@ static PyObject *Video_device_create_buffers(Video_device *self, PyObject *args) if(self->buffers) { PyErr_SetString(PyExc_ValueError, "Buffers are already created"); - return NULL; + Py_RETURN_NONE; } struct v4l2_requestbuffers reqbuf; @@ -357,13 +357,13 @@ static PyObject *Video_device_create_buffers(Video_device *self, PyObject *args) if(my_ioctl(self->fd, VIDIOC_REQBUFS, &reqbuf)) { - return NULL; + Py_RETURN_NONE; } if(!reqbuf.count) { PyErr_SetString(PyExc_IOError, "Not enough buffer memory"); - return NULL; + Py_RETURN_NONE; } self->buffers = (struct buffer *)malloc(reqbuf.count * sizeof(struct buffer)); @@ -371,7 +371,7 @@ static PyObject *Video_device_create_buffers(Video_device *self, PyObject *args) if(!self->buffers) { PyErr_NoMemory(); - return NULL; + Py_RETURN_NONE; } unsigned int i; @@ -385,7 +385,7 @@ static PyObject *Video_device_create_buffers(Video_device *self, PyObject *args) if(my_ioctl(self->fd, VIDIOC_QUERYBUF, &buffer)) { - return NULL; + Py_RETURN_NONE; } self->buffers[i].length = buffer.length; @@ -395,7 +395,7 @@ static PyObject *Video_device_create_buffers(Video_device *self, PyObject *args) if(self->buffers[i].start == MAP_FAILED) { PyErr_SetFromErrno(PyExc_IOError); - return NULL; + Py_RETURN_NONE; } } @@ -409,7 +409,7 @@ static PyObject *Video_device_queue_all_buffers(Video_device *self) { ASSERT_OPEN; PyErr_SetString(PyExc_ValueError, "Buffers have not been created"); - return NULL; + Py_RETURN_NONE; } int i; @@ -424,7 +424,7 @@ static PyObject *Video_device_queue_all_buffers(Video_device *self) if(my_ioctl(self->fd, VIDIOC_QBUF, &buffer)) { - return NULL; + Py_RETURN_NONE; } } @@ -437,7 +437,7 @@ static PyObject *Video_device_read_internal(Video_device *self, int queue, int r { ASSERT_OPEN; PyErr_SetString(PyExc_ValueError, "Buffers have not been created"); - return NULL; + Py_RETURN_NONE; } struct v4l2_buffer buffer; @@ -446,7 +446,7 @@ static PyObject *Video_device_read_internal(Video_device *self, int queue, int r if(my_ioctl(self->fd, VIDIOC_DQBUF, &buffer)) { - return NULL; + Py_RETURN_NONE; } #ifdef USE_LIBV4L @@ -455,7 +455,7 @@ static PyObject 
*Video_device_read_internal(Video_device *self, int queue, int r if(!result) { - return NULL; + Py_RETURN_NONE; } #else // Convert buffer from YUYV to RGB. @@ -466,7 +466,7 @@ static PyObject *Video_device_read_internal(Video_device *self, int queue, int r if(!result) { - return NULL; + Py_RETURN_NONE; } char *rgb = PyString_AS_STRING(result); @@ -511,7 +511,7 @@ static PyObject *Video_device_read_internal(Video_device *self, int queue, int r if(queue && my_ioctl(self->fd, VIDIOC_QBUF, &buffer)) { - return NULL; + Py_RETURN_NONE; } return out; @@ -523,7 +523,7 @@ static PyObject *Video_device_read(Video_device *self, PyObject *args) if(!PyArg_ParseTuple(args, "|i", &return_timestamp)) { - return NULL; + Py_RETURN_NONE; } return Video_device_read_internal(self, 0, return_timestamp); @@ -535,7 +535,7 @@ static PyObject *Video_device_read_and_queue(Video_device *self, PyObject *args) if(!PyArg_ParseTuple(args, "|i", &return_timestamp)) { - return NULL; + Py_RETURN_NONE; } return Video_device_read_internal(self, 1, return_timestamp); @@ -844,13 +844,13 @@ static PyObject *Device_manager_Start(Device_manager *self, PyObject *args) if(my_ioctl(fd, VIDIOC_REQBUFS, &reqbuf)) { - return NULL; + Py_RETURN_NONE; } if(!reqbuf.count) { PyErr_SetString(PyExc_IOError, "Not enough buffer memory"); - return NULL; + Py_RETURN_NONE; } struct buffer *buffs = (struct buffer *)malloc(reqbuf.count * sizeof(struct buffer)); @@ -859,12 +859,10 @@ static PyObject *Device_manager_Start(Device_manager *self, PyObject *args) if(!self->buffers[devarg]) { PyErr_NoMemory(); - return NULL; + Py_RETURN_NONE; } - unsigned int i; - - for(i = 0; i < reqbuf.count; i++) + for(unsigned int i = 0; i < reqbuf.count; i++) { struct v4l2_buffer buffer; buffer.index = i; @@ -873,7 +871,7 @@ static PyObject *Device_manager_Start(Device_manager *self, PyObject *args) if(my_ioctl(fd, VIDIOC_QUERYBUF, &buffer)) { - return NULL; + Py_RETURN_NONE; } buffs[i].length = buffer.length; @@ -882,23 +880,74 @@ static PyObject *Device_manager_Start(Device_manager *self, PyObject *args) if(buffs[i].start == MAP_FAILED) { - PyErr_SetFromErrno(PyExc_IOError); - return NULL; + PyErr_SetFromErrno(PyExc_IOError); + Py_RETURN_NONE; } } - self->buffer_count[devarg] = i; + self->buffer_count[devarg] = reqbuf.count; + buffer_count = self->buffer_count[devarg]; // Send the buffer to the device. Some devices require this to be done // before calling 'start'. - //self.video.queue_all_buffers() + + for(int i = 0; i < buffer_count; i++) + { + struct v4l2_buffer buffer; + buffer.index = i; + buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + buffer.memory = V4L2_MEMORY_MMAP; + + if(my_ioctl(fd, VIDIOC_QBUF, &buffer)) + { + Py_RETURN_NONE; + } + } // Start the device. This lights the LED if it's a camera that has one. 
- //self.video.start()*/ + enum v4l2_buf_type type; + type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + + if(my_ioctl(fd, VIDIOC_STREAMON, &type)) + { + Py_RETURN_NONE; + } Py_RETURN_NONE; } +static PyObject *Device_manager_stop(Video_device *self, PyObject *args) +{ + //Process arguments + const char *devarg = NULL; + if(PyTuple_Size(args) < 1) + { + PyObject *pydevarg = PyTuple_GetItem(args, 0); + devarg = PyString_AsString(pydevarg); + } + else + { + devarg = "/dev/video0"; + } + + if(self->fd[devarg] < 0) + { + PyErr_SetString(PyExc_ValueError, "I/O operation on closed file"); + Py_RETURN_NONE; + } + + enum v4l2_buf_type type; + type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + + if(my_ioctl(self->fd[devarg], VIDIOC_STREAMOFF, &type)) + { + Py_RETURN_NONE; + } + + Py_RETURN_NONE; +} + + // ********************************************************************* static PyMethodDef Video_device_methods[] = { @@ -969,8 +1018,11 @@ static PyTypeObject Video_device_type = { static PyMethodDef Device_manager_methods[] = { {"start", (PyCFunction)Device_manager_Start, METH_VARARGS, - "start()\n\n" + "start(dev = '\\dev\\video0', reqSize=(640, 480), reqFps = 30, fmt = 'MJPEG\', buffer_count = 10)\n\n" "Start video capture."}, + {"stop", (PyCFunction)Device_manager_stop, METH_NOARGS, + "stop()\n\n" + "Stop video capture."}, {NULL} }; From 66faa42bd64947059e5d8380d8d63e0b20808348 Mon Sep 17 00:00:00 2001 From: Tim Sheerman-Chase Date: Sun, 20 Oct 2013 04:08:51 +0100 Subject: [PATCH 013/256] Start and stop function works --- v4l2capture.cpp | 45 +++++++++++++++++++++++++++------------------ 1 file changed, 27 insertions(+), 18 deletions(-) diff --git a/v4l2capture.cpp b/v4l2capture.cpp index a8eb088..0fdfed5 100644 --- a/v4l2capture.cpp +++ b/v4l2capture.cpp @@ -50,12 +50,14 @@ typedef struct { int buffer_count; } Video_device; -typedef struct { +class Device_manager_cl{ +public: PyObject_HEAD - std::map fd; - std::map buffers; - std::map buffer_count; -} Device_manager; + std::map *fd; + std::map *buffers; + std::map *buffer_counts; +}; +typedef Device_manager_cl Device_manager; struct capability { int id; @@ -767,21 +769,28 @@ static PyObject *InsertHuffmanTable(PyObject *self, PyObject *args) static void Device_manager_dealloc(Device_manager *self) { + delete self->fd; + delete self->buffers; + delete self->buffer_counts; self->ob_type->tp_free((PyObject *)self); } static int Device_manager_init(Device_manager *self, PyObject *args, PyObject *kwargs) { + self->fd = new std::map; + self->buffers = new std::map; + self->buffer_counts = new std::map; return 0; } static PyObject *Device_manager_Start(Device_manager *self, PyObject *args) // self, dev = None, reqSize=(640, 480), reqFps = 30, fmt = "MJPEG", buffer_count = 10): { + //Process arguments const char *devarg = NULL; - if(PyTuple_Size(args) < 1) + if(PyTuple_Size(args) >= 1) { PyObject *pydevarg = PyTuple_GetItem(args, 0); devarg = PyString_AsString(pydevarg); @@ -799,8 +808,8 @@ static PyObject *Device_manager_Start(Device_manager *self, PyObject *args) } //Check this device has not already been start - std::map::iterator it = self->fd.find(devarg); - if(it!=self->fd.end()) + std::map::iterator it = self->fd->find(devarg); + if(it!=self->fd->end()) { PyErr_Format(PyExc_RuntimeError, "Device already started."); Py_RETURN_NONE; @@ -815,8 +824,8 @@ static PyObject *Device_manager_Start(Device_manager *self, PyObject *args) Py_RETURN_NONE; } - self->fd[devarg] = fd; - self->buffers[devarg] = NULL; + (*self->fd)[devarg] = fd; + (*self->buffers)[devarg] = NULL; //Set 
other parameters for capture //TODO @@ -854,9 +863,9 @@ static PyObject *Device_manager_Start(Device_manager *self, PyObject *args) } struct buffer *buffs = (struct buffer *)malloc(reqbuf.count * sizeof(struct buffer)); - self->buffers[devarg] = buffs; + (*self->buffers)[devarg] = buffs; - if(!self->buffers[devarg]) + if(!buffs) { PyErr_NoMemory(); Py_RETURN_NONE; @@ -885,8 +894,8 @@ static PyObject *Device_manager_Start(Device_manager *self, PyObject *args) } } - self->buffer_count[devarg] = reqbuf.count; - buffer_count = self->buffer_count[devarg]; + (*self->buffer_counts)[devarg] = reqbuf.count; + buffer_count = reqbuf.count; // Send the buffer to the device. Some devices require this to be done // before calling 'start'. @@ -916,7 +925,7 @@ static PyObject *Device_manager_Start(Device_manager *self, PyObject *args) Py_RETURN_NONE; } -static PyObject *Device_manager_stop(Video_device *self, PyObject *args) +static PyObject *Device_manager_stop(Device_manager *self, PyObject *args) { //Process arguments const char *devarg = NULL; @@ -930,7 +939,7 @@ static PyObject *Device_manager_stop(Video_device *self, PyObject *args) devarg = "/dev/video0"; } - if(self->fd[devarg] < 0) + if((*self->fd)[devarg] < 0) { PyErr_SetString(PyExc_ValueError, "I/O operation on closed file"); Py_RETURN_NONE; @@ -939,7 +948,7 @@ static PyObject *Device_manager_stop(Video_device *self, PyObject *args) enum v4l2_buf_type type; type = V4L2_BUF_TYPE_VIDEO_CAPTURE; - if(my_ioctl(self->fd[devarg], VIDIOC_STREAMOFF, &type)) + if(my_ioctl((*self->fd)[devarg], VIDIOC_STREAMOFF, &type)) { Py_RETURN_NONE; } @@ -1030,7 +1039,7 @@ static PyTypeObject Device_manager_type = { PyObject_HEAD_INIT(NULL) 0, "v4l2capture.Device_manager", sizeof(Device_manager), 0, (destructor)Device_manager_dealloc, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, Py_TPFLAGS_DEFAULT, "Video_device(path)\n\nOpens the video device at " + 0, Py_TPFLAGS_DEFAULT, "Device_manager(path)\n\nOpens the video device at " "the given path and returns an object that can capture images. 
The " "constructor and all methods except close may raise IOError.", 0, 0, 0, 0, 0, 0, Device_manager_methods, 0, 0, 0, 0, 0, 0, 0, From 105a30e6ecb25eb7cbcdfee8134388fe9820026f Mon Sep 17 00:00:00 2001 From: Tim Sheerman-Chase Date: Sun, 20 Oct 2013 04:14:37 +0100 Subject: [PATCH 014/256] Fix segfault in stop --- v4l2capture.cpp | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/v4l2capture.cpp b/v4l2capture.cpp index 0fdfed5..501e046 100644 --- a/v4l2capture.cpp +++ b/v4l2capture.cpp @@ -928,8 +928,9 @@ static PyObject *Device_manager_Start(Device_manager *self, PyObject *args) static PyObject *Device_manager_stop(Device_manager *self, PyObject *args) { //Process arguments + const char *devarg = NULL; - if(PyTuple_Size(args) < 1) + if(PyTuple_Size(args) >= 1) { PyObject *pydevarg = PyTuple_GetItem(args, 0); devarg = PyString_AsString(pydevarg); @@ -1029,7 +1030,7 @@ static PyMethodDef Device_manager_methods[] = { {"start", (PyCFunction)Device_manager_Start, METH_VARARGS, "start(dev = '\\dev\\video0', reqSize=(640, 480), reqFps = 30, fmt = 'MJPEG\', buffer_count = 10)\n\n" "Start video capture."}, - {"stop", (PyCFunction)Device_manager_stop, METH_NOARGS, + {"stop", (PyCFunction)Device_manager_stop, METH_VARARGS, "stop()\n\n" "Stop video capture."}, {NULL} From 39b313faf86edae1a6b0ff30f3c1f5959e7f4dac Mon Sep 17 00:00:00 2001 From: Tim Sheerman-Chase Date: Sun, 20 Oct 2013 05:21:36 +0100 Subject: [PATCH 015/256] Stopping and closing and restarting works --- setup.py | 3 +- v4l2capture.cpp | 191 +++++++++++++++++++++++++++++++++++++++++++++--- 2 files changed, 181 insertions(+), 13 deletions(-) diff --git a/setup.py b/setup.py index 66ff7b5..67e9cd9 100755 --- a/setup.py +++ b/setup.py @@ -25,4 +25,5 @@ "License :: Public Domain", "Programming Language :: C++"], ext_modules = [ - Extension("v4l2capture", ["v4l2capture.cpp"], libraries = ["v4l2"])]) + Extension("v4l2capture", ["v4l2capture.cpp"], libraries = ["v4l2", "pthread"])]) + diff --git a/v4l2capture.cpp b/v4l2capture.cpp index 501e046..c1f1f23 100644 --- a/v4l2capture.cpp +++ b/v4l2capture.cpp @@ -19,6 +19,7 @@ #include #include #include +#include #ifdef USE_LIBV4L #include @@ -56,6 +57,7 @@ class Device_manager_cl{ std::map *fd; std::map *buffers; std::map *buffer_counts; + std::map *threadArgStore; }; typedef Device_manager_cl Device_manager; @@ -109,23 +111,25 @@ static void Video_device_unmap(Video_device *self) int i; for(i = 0; i < self->buffer_count; i++) - { - v4l2_munmap(self->buffers[i].start, self->buffers[i].length); - } + { + v4l2_munmap(self->buffers[i].start, self->buffers[i].length); + } + free(self->buffers); + self->buffers = NULL; } static void Video_device_dealloc(Video_device *self) { if(self->fd >= 0) - { - if(self->buffers) { - Video_device_unmap(self); - } - - v4l2_close(self->fd); + if(self->buffers) + { + Video_device_unmap(self); } + v4l2_close(self->fd); + } + self->ob_type->tp_free((PyObject *)self); } @@ -767,11 +771,88 @@ static PyObject *InsertHuffmanTable(PyObject *self, PyObject *args) // ********************************************************************* +class Device_manager_Worker_thread_args +{ +public: + Device_manager *self; + std::string devName; + int stop; + int stopped; + pthread_mutex_t lock; + + Device_manager_Worker_thread_args() + { + stop = 0; + stopped = 1; + pthread_mutex_init(&lock, NULL); + }; + + virtual ~Device_manager_Worker_thread_args() + { + pthread_mutex_destroy(&lock); + }; + + void Stop() + { + pthread_mutex_lock(&this->lock); + this->stop = 1; + 
pthread_mutex_unlock(&this->lock); + }; + + void WaitForStop() + { + while(1) + { + pthread_mutex_lock(&this->lock); + int s = this->stopped; + pthread_mutex_unlock(&this->lock); + + if(s) return; + usleep(10000); + } + }; +}; + +void *Device_manager_Worker_thread(void *arg) +{ + class Device_manager_Worker_thread_args *argobj = (class Device_manager_Worker_thread_args*) arg; + printf("Thread started\n"); + int running = 1; + pthread_mutex_lock(&argobj->lock); + argobj->stopped = 0; + pthread_mutex_unlock(&argobj->lock); + + while(running) + { + usleep(1000); +/* int return_timestamp=0; + + if(!PyArg_ParseTuple(args, "|i", &return_timestamp)) + { + Py_RETURN_NONE; + } + + return Video_device_read_internal(self, 1, return_timestamp);*/ + pthread_mutex_lock(&argobj->lock); + running = !argobj->stop; + pthread_mutex_unlock(&argobj->lock); + } + printf("Thread stopping\n"); + pthread_mutex_lock(&argobj->lock); + argobj->stopped = 1; + pthread_mutex_unlock(&argobj->lock); + + return NULL; +} + +// ********************************************************************** + static void Device_manager_dealloc(Device_manager *self) { delete self->fd; delete self->buffers; delete self->buffer_counts; + delete self->threadArgStore; self->ob_type->tp_free((PyObject *)self); } @@ -781,6 +862,7 @@ static int Device_manager_init(Device_manager *self, PyObject *args, self->fd = new std::map; self->buffers = new std::map; self->buffer_counts = new std::map; + self->threadArgStore = new std::map; return 0; } @@ -814,7 +896,7 @@ static PyObject *Device_manager_Start(Device_manager *self, PyObject *args) PyErr_Format(PyExc_RuntimeError, "Device already started."); Py_RETURN_NONE; } - + printf("a\n"); //Open the video device. int fd = v4l2_open(devarg, O_RDWR | O_NONBLOCK); @@ -823,6 +905,7 @@ static PyObject *Device_manager_Start(Device_manager *self, PyObject *args) PyErr_SetFromErrnoWithFilename(PyExc_IOError, devarg); Py_RETURN_NONE; } + printf("b\n"); (*self->fd)[devarg] = fd; (*self->buffers)[devarg] = NULL; @@ -853,6 +936,7 @@ static PyObject *Device_manager_Start(Device_manager *self, PyObject *args) if(my_ioctl(fd, VIDIOC_REQBUFS, &reqbuf)) { + PyErr_SetString(PyExc_IOError, "VIDIOC_REQBUFS failed"); Py_RETURN_NONE; } @@ -861,6 +945,7 @@ static PyObject *Device_manager_Start(Device_manager *self, PyObject *args) PyErr_SetString(PyExc_IOError, "Not enough buffer memory"); Py_RETURN_NONE; } + printf("c\n"); struct buffer *buffs = (struct buffer *)malloc(reqbuf.count * sizeof(struct buffer)); (*self->buffers)[devarg] = buffs; @@ -893,7 +978,7 @@ static PyObject *Device_manager_Start(Device_manager *self, PyObject *args) Py_RETURN_NONE; } } - + printf("d\n"); (*self->buffer_counts)[devarg] = reqbuf.count; buffer_count = reqbuf.count; @@ -921,7 +1006,15 @@ static PyObject *Device_manager_Start(Device_manager *self, PyObject *args) { Py_RETURN_NONE; } + + pthread_t thread; + Device_manager_Worker_thread_args *threadArgs = new Device_manager_Worker_thread_args; + (*self->threadArgStore)[devarg] = threadArgs; + threadArgs->self = self; + threadArgs->devName = devarg; + pthread_create(&thread, NULL, Device_manager_Worker_thread, threadArgs); + printf("Create done\n"); Py_RETURN_NONE; } @@ -940,12 +1033,23 @@ static PyObject *Device_manager_stop(Device_manager *self, PyObject *args) devarg = "/dev/video0"; } + std::map::iterator it = self->fd->find(devarg); + if(it==self->fd->end()) + { + PyErr_Format(PyExc_RuntimeError, "Device not started."); + Py_RETURN_NONE; + } + if((*self->fd)[devarg] < 0) { 
PyErr_SetString(PyExc_ValueError, "I/O operation on closed file"); Py_RETURN_NONE; } + //Stop worker thread + (*self->threadArgStore)[devarg]->Stop(); + + //Signal V4l2 api enum v4l2_buf_type type; type = V4L2_BUF_TYPE_VIDEO_CAPTURE; @@ -954,9 +1058,69 @@ static PyObject *Device_manager_stop(Device_manager *self, PyObject *args) Py_RETURN_NONE; } + //Release memeory + (*self->threadArgStore)[devarg]->WaitForStop(); + delete (*self->threadArgStore)[devarg]; + self->threadArgStore->erase(devarg); + Py_RETURN_NONE; } +static PyObject *Device_manager_close(Device_manager *self, PyObject *args) +{ + //Process arguments + const char *devarg = NULL; + if(PyTuple_Size(args) >= 1) + { + PyObject *pydevarg = PyTuple_GetItem(args, 0); + devarg = PyString_AsString(pydevarg); + } + else + { + devarg = "/dev/video0"; + } + + //Check if thread is still running + std::map::iterator it3 = self->threadArgStore->find(devarg); + if(it3 != self->threadArgStore->end()) + { + //Stop thread that is still running + Device_manager_stop(self, args); + } + + std::map::iterator it = self->fd->find(devarg); + if(it==self->fd->end()) + { + PyErr_Format(PyExc_RuntimeError, "Device not started."); + Py_RETURN_NONE; + } + + int fd = (*self->fd)[devarg]; + + std::map::iterator it2 = self->buffers->find(devarg); + if(it2 != self->buffers->end()) + { + struct buffer *buffers = (*self->buffers)[devarg]; + int buffer_count = (*self->buffer_counts)[devarg]; + + for(int i = 0; i < buffer_count; i++) + { + v4l2_munmap(buffers[i].start, buffers[i].length); + } + free (buffers); + + //Release memory + self->buffers->erase(devarg); + self->buffer_counts->erase(devarg); + } + + //Release memory + v4l2_close(fd); + fd = -1; + self->fd->erase(devarg); + + Py_RETURN_NONE; +} // ********************************************************************* @@ -1031,8 +1195,11 @@ static PyMethodDef Device_manager_methods[] = { "start(dev = '\\dev\\video0', reqSize=(640, 480), reqFps = 30, fmt = 'MJPEG\', buffer_count = 10)\n\n" "Start video capture."}, {"stop", (PyCFunction)Device_manager_stop, METH_VARARGS, - "stop()\n\n" + "stop(dev = '\\dev\\video0')\n\n" "Stop video capture."}, + {"close", (PyCFunction)Device_manager_close, METH_VARARGS, + "close(dev = '\\dev\\video0')\n\n" + "Close video device. 
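As a usage sketch of the Device_manager lifecycle at this point in the series (method names and defaults follow the docstrings and code above; the docstrings' '\dev\video0' is presumably meant to be '/dev/video0', the default used in the code):

    import time
    import v4l2capture

    mgr = v4l2capture.Device_manager()
    mgr.start("/dev/video0")   # open the device, mmap and queue buffers, VIDIOC_STREAMON,
                               # and spawn the background worker thread
    time.sleep(1.0)
    mgr.stop("/dev/video0")    # signal the worker, VIDIOC_STREAMOFF, then wait for the thread
    mgr.close("/dev/video0")   # stops first if still running, then munmaps buffers and closes the fd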
Subsequent calls to other methods will fail."}, {NULL} }; From f86020d574b7ba6a921a7a4a9d09166fcdcd7303 Mon Sep 17 00:00:00 2001 From: Tim Sheerman-Chase Date: Sun, 20 Oct 2013 05:26:44 +0100 Subject: [PATCH 016/256] Tidy code --- v4l2capture.cpp | 60 ++++++++++++++++++++++++++++++++++++------------- 1 file changed, 44 insertions(+), 16 deletions(-) diff --git a/v4l2capture.cpp b/v4l2capture.cpp index c1f1f23..1e06a12 100644 --- a/v4l2capture.cpp +++ b/v4l2capture.cpp @@ -866,10 +866,8 @@ static int Device_manager_init(Device_manager *self, PyObject *args, return 0; } -static PyObject *Device_manager_Start(Device_manager *self, PyObject *args) -// self, dev = None, reqSize=(640, 480), reqFps = 30, fmt = "MJPEG", buffer_count = 10): +static PyObject *Device_manager_open(Device_manager *self, PyObject *args) { - //Process arguments const char *devarg = NULL; if(PyTuple_Size(args) >= 1) @@ -882,21 +880,14 @@ static PyObject *Device_manager_Start(Device_manager *self, PyObject *args) devarg = "/dev/video0"; } - long buffer_count = 10; - if(PyTuple_Size(args) >= 4) - { - PyObject *pybufferarg = PyTuple_GetItem(args, 4); - buffer_count = PyInt_AsLong(pybufferarg); - } - - //Check this device has not already been start + //Check this device has not already been opened std::map::iterator it = self->fd->find(devarg); if(it!=self->fd->end()) { - PyErr_Format(PyExc_RuntimeError, "Device already started."); + PyErr_Format(PyExc_RuntimeError, "Device already opened."); Py_RETURN_NONE; } - printf("a\n"); + //Open the video device. int fd = v4l2_open(devarg, O_RDWR | O_NONBLOCK); @@ -905,11 +896,46 @@ static PyObject *Device_manager_Start(Device_manager *self, PyObject *args) PyErr_SetFromErrnoWithFilename(PyExc_IOError, devarg); Py_RETURN_NONE; } - printf("b\n"); (*self->fd)[devarg] = fd; (*self->buffers)[devarg] = NULL; + Py_RETURN_NONE; +} + +static PyObject *Device_manager_Start(Device_manager *self, PyObject *args) +// self, dev = None, reqSize=(640, 480), reqFps = 30, fmt = "MJPEG", buffer_count = 10): +{ + + //Process arguments + const char *devarg = NULL; + if(PyTuple_Size(args) >= 1) + { + PyObject *pydevarg = PyTuple_GetItem(args, 0); + devarg = PyString_AsString(pydevarg); + } + else + { + devarg = "/dev/video0"; + } + + long buffer_count = 10; + if(PyTuple_Size(args) >= 4) + { + PyObject *pybufferarg = PyTuple_GetItem(args, 4); + buffer_count = PyInt_AsLong(pybufferarg); + } + + //Check this device has not already been start + std::map::iterator it = self->fd->find(devarg); + if(it==self->fd->end()) + { + PyErr_Format(PyExc_RuntimeError, "Device not open."); + Py_RETURN_NONE; + } + + int fd = (*self->fd)[devarg]; + //Set other parameters for capture //TODO @@ -945,7 +971,6 @@ static PyObject *Device_manager_Start(Device_manager *self, PyObject *args) PyErr_SetString(PyExc_IOError, "Not enough buffer memory"); Py_RETURN_NONE; } - printf("c\n"); struct buffer *buffs = (struct buffer *)malloc(reqbuf.count * sizeof(struct buffer)); (*self->buffers)[devarg] = buffs; @@ -978,7 +1003,7 @@ static PyObject *Device_manager_Start(Device_manager *self, PyObject *args) Py_RETURN_NONE; } } - printf("d\n"); + (*self->buffer_counts)[devarg] = reqbuf.count; buffer_count = reqbuf.count; @@ -1191,6 +1216,9 @@ static PyTypeObject Video_device_type = { // ********************************************************************* static PyMethodDef Device_manager_methods[] = { + {"open", (PyCFunction)Device_manager_open, METH_VARARGS, + "open(dev = '\\dev\\video0')\n\n" + "Open video capture."}, {"start", 
(PyCFunction)Device_manager_Start, METH_VARARGS, "start(dev = '\\dev\\video0', reqSize=(640, 480), reqFps = 30, fmt = 'MJPEG\', buffer_count = 10)\n\n" "Start video capture."}, From 0e05ba73b86f795f05c677da30162067c4838fc9 Mon Sep 17 00:00:00 2001 From: Tim Sheerman-Chase Date: Sun, 20 Oct 2013 05:27:16 +0100 Subject: [PATCH 017/256] Tidy code --- v4l2capture.cpp | 1 - 1 file changed, 1 deletion(-) diff --git a/v4l2capture.cpp b/v4l2capture.cpp index 1e06a12..4136d83 100644 --- a/v4l2capture.cpp +++ b/v4l2capture.cpp @@ -1039,7 +1039,6 @@ static PyObject *Device_manager_Start(Device_manager *self, PyObject *args) threadArgs->devName = devarg; pthread_create(&thread, NULL, Device_manager_Worker_thread, threadArgs); - printf("Create done\n"); Py_RETURN_NONE; } From 71398168ef9485432608c258c617f559a0b1705d Mon Sep 17 00:00:00 2001 From: Tim Sheerman-Chase Date: Sun, 20 Oct 2013 05:41:04 +0100 Subject: [PATCH 018/256] Properly do deinit --- v4l2capture.cpp | 25 ++++++++++++++++++++++++- 1 file changed, 24 insertions(+), 1 deletion(-) diff --git a/v4l2capture.cpp b/v4l2capture.cpp index 4136d83..27a034a 100644 --- a/v4l2capture.cpp +++ b/v4l2capture.cpp @@ -61,6 +61,9 @@ class Device_manager_cl{ }; typedef Device_manager_cl Device_manager; +static PyObject *Device_manager_stop(Device_manager *self, PyObject *args); +static PyObject *Device_manager_close(Device_manager *self, PyObject *args); + struct capability { int id; const char *name; @@ -205,7 +208,7 @@ static PyObject *Video_device_get_info(Video_device *self) if(!s) { - Py_DECREF(set); + Py_DECREF(set); Py_RETURN_NONE; } @@ -849,6 +852,26 @@ void *Device_manager_Worker_thread(void *arg) static void Device_manager_dealloc(Device_manager *self) { + //Stop high level threads + for(std::map::iterator it = self->threadArgStore->begin(); + it != self->threadArgStore->end(); it++) + { + PyObject *args = PyTuple_New(1); + PyTuple_SetItem(args, 0, PyString_FromString(it->first.c_str())); + Device_manager_stop(self, args); + Py_DECREF(args); + } + + //Close devices + for(std::map::iterator it = self->fd->begin(); + it != self->fd->end(); it++) + { + PyObject *args = PyTuple_New(1); + PyTuple_SetItem(args, 0, PyString_FromString(it->first.c_str())); + Device_manager_close(self, args); + Py_DECREF(args); + } + delete self->fd; delete self->buffers; delete self->buffer_counts; From d59be190cfb917f50dbdf9a41ea732c1ab17b7d6 Mon Sep 17 00:00:00 2001 From: Tim Sheerman-Chase Date: Sun, 20 Oct 2013 06:14:33 +0100 Subject: [PATCH 019/256] Start on reading frame in worker thread --- v4l2capture.cpp | 126 +++++++++++++++++++++++++++++++++++++++--------- 1 file changed, 104 insertions(+), 22 deletions(-) diff --git a/v4l2capture.cpp b/v4l2capture.cpp index 27a034a..08b41c5 100644 --- a/v4l2capture.cpp +++ b/v4l2capture.cpp @@ -814,36 +814,118 @@ class Device_manager_Worker_thread_args usleep(10000); } }; -}; -void *Device_manager_Worker_thread(void *arg) -{ - class Device_manager_Worker_thread_args *argobj = (class Device_manager_Worker_thread_args*) arg; - printf("Thread started\n"); - int running = 1; - pthread_mutex_lock(&argobj->lock); - argobj->stopped = 0; - pthread_mutex_unlock(&argobj->lock); - - while(running) + void ReadFrame() { - usleep(1000); -/* int return_timestamp=0; + /*if(!self->buffers) + { + ASSERT_OPEN; + PyErr_SetString(PyExc_ValueError, "Buffers have not been created"); + Py_RETURN_NONE; + } - if(!PyArg_ParseTuple(args, "|i", &return_timestamp)) + struct v4l2_buffer buffer; + buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + buffer.memory = 
V4L2_MEMORY_MMAP; + + if(my_ioctl(self->fd, VIDIOC_DQBUF, &buffer)) + { + Py_RETURN_NONE; + } + + #ifdef USE_LIBV4L + PyObject *result = PyString_FromStringAndSize( + (const char*)self->buffers[buffer.index].start, buffer.bytesused); + + if(!result) + { + Py_RETURN_NONE; + } + #else + // Convert buffer from YUYV to RGB. + // For the byte order, see: http://v4l2spec.bytesex.org/spec/r4339.htm + // For the color conversion, see: http://v4l2spec.bytesex.org/spec/x2123.htm + int length = buffer.bytesused * 6 / 4; + PyObject *result = PyString_FromStringAndSize(NULL, length); + + if(!result) { Py_RETURN_NONE; } - return Video_device_read_internal(self, 1, return_timestamp);*/ - pthread_mutex_lock(&argobj->lock); - running = !argobj->stop; - pthread_mutex_unlock(&argobj->lock); + char *rgb = PyString_AS_STRING(result); + char *rgb_max = rgb + length; + unsigned char *yuyv = self->buffers[buffer.index].start; + + #define CLAMP(c) ((c) <= 0 ? 0 : (c) >= 65025 ? 255 : (c) >> 8) + while(rgb < rgb_max) + { + int u = yuyv[1] - 128; + int v = yuyv[3] - 128; + int uv = 100 * u + 208 * v; + u *= 516; + v *= 409; + + int y = 298 * (yuyv[0] - 16); + rgb[0] = CLAMP(y + v); + rgb[1] = CLAMP(y - uv); + rgb[2] = CLAMP(y + u); + + y = 298 * (yuyv[2] - 16); + rgb[3] = CLAMP(y + v); + rgb[4] = CLAMP(y - uv); + rgb[5] = CLAMP(y + u); + + rgb += 6; + yuyv += 4; + } + #undef CLAMP + #endif + PyObject *out = result; + + if(return_timestamp) + { + out = PyTuple_New(4); + PyTuple_SetItem(out, 0, result); + PyTuple_SetItem(out, 1, PyInt_FromLong(buffer.timestamp.tv_sec)); + PyTuple_SetItem(out, 2, PyInt_FromLong(buffer.timestamp.tv_usec)); + PyTuple_SetItem(out, 3, PyInt_FromLong(buffer.sequence)); + } + + if(queue && my_ioctl(self->fd, VIDIOC_QBUF, &buffer)) + { + Py_RETURN_NONE; + }*/ } - printf("Thread stopping\n"); - pthread_mutex_lock(&argobj->lock); - argobj->stopped = 1; - pthread_mutex_unlock(&argobj->lock); + + void Run() + { + printf("Thread started\n"); + int running = 1; + pthread_mutex_lock(&this->lock); + this->stopped = 0; + pthread_mutex_unlock(&this->lock); + + while(running) + { + usleep(1000); + this->ReadFrame(); + + pthread_mutex_lock(&this->lock); + running = !this->stop; + pthread_mutex_unlock(&this->lock); + } + printf("Thread stopping\n"); + pthread_mutex_lock(&this->lock); + this->stopped = 1; + pthread_mutex_unlock(&this->lock); + }; +}; + +void *Device_manager_Worker_thread(void *arg) +{ + class Device_manager_Worker_thread_args *argobj = (class Device_manager_Worker_thread_args*) arg; + argobj->Run(); return NULL; } From c55d5f3122ecc509446cc6c22255ebdbc2963c15 Mon Sep 17 00:00:00 2001 From: Tim Sheerman-Chase Date: Sun, 20 Oct 2013 06:19:58 +0100 Subject: [PATCH 020/256] Change license to GPL --- README | 7 +- capture_picture.py | 16 +- capture_picture_delayed.py | 12 +- gpl-2.0.txt | 339 +++++++++++++++++++++++++++++++++++++ list_devices.py | 12 +- setup.py | 12 +- v4l2capture.cpp | 10 +- 7 files changed, 368 insertions(+), 40 deletions(-) create mode 100644 gpl-2.0.txt diff --git a/README b/README index 26f5501..d52c42d 100644 --- a/README +++ b/README @@ -3,11 +3,9 @@ Python extension to capture video with video4linux2 2009, 2010, 2011 Fredrik Portstrom 2011 Joakim Gebart +2013 Tim Sheerman-Chase -I, the copyright holder of this file, hereby release it into the -public domain. This applies worldwide. In case this is not legally -possible: I grant anyone the right to use this work for any purpose, -without any conditions, unless such conditions are required by law. 
+This software may be used and redistributed under the terms of the GPL v2 (or later) license. Introduction ============ @@ -55,3 +53,4 @@ Change log 1.1 (2009-11-03) - Updated URL and documentation. 1.0 (2009-02-28) - Initial release. + diff --git a/capture_picture.py b/capture_picture.py index 0fec3bb..6e47d25 100755 --- a/capture_picture.py +++ b/capture_picture.py @@ -1,17 +1,11 @@ #!/usr/bin/python -# # python-v4l2capture +# Python extension to capture video with video4linux2 # -# This file is an example on how to capture a picture with -# python-v4l2capture. -# -# 2009, 2010 Fredrik Portstrom -# -# I, the copyright holder of this file, hereby release it into the -# public domain. This applies worldwide. In case this is not legally -# possible: I grant anyone the right to use this work for any -# purpose, without any conditions, unless such conditions are -# required by law. +# 2009, 2010, 2011 Fredrik Portstrom, released into the public domain +# 2011, Joakim Gebart +# 2013, Tim Sheerman-Chase +# See README for license import Image import select diff --git a/capture_picture_delayed.py b/capture_picture_delayed.py index 9597d7b..0f2b988 100755 --- a/capture_picture_delayed.py +++ b/capture_picture_delayed.py @@ -9,13 +9,13 @@ # devices that require starting to capture pictures immediatly when # the device is started. # -# 2009, 2010 Fredrik Portstrom +# python-v4l2capture +# Python extension to capture video with video4linux2 # -# I, the copyright holder of this file, hereby release it into the -# public domain. This applies worldwide. In case this is not legally -# possible: I grant anyone the right to use this work for any -# purpose, without any conditions, unless such conditions are -# required by law. +# 2009, 2010, 2011 Fredrik Portstrom, released into the public domain +# 2011, Joakim Gebart +# 2013, Tim Sheerman-Chase +# See README for license import Image import select diff --git a/gpl-2.0.txt b/gpl-2.0.txt new file mode 100644 index 0000000..d159169 --- /dev/null +++ b/gpl-2.0.txt @@ -0,0 +1,339 @@ + GNU GENERAL PUBLIC LICENSE + Version 2, June 1991 + + Copyright (C) 1989, 1991 Free Software Foundation, Inc., + 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + + Preamble + + The licenses for most software are designed to take away your +freedom to share and change it. By contrast, the GNU General Public +License is intended to guarantee your freedom to share and change free +software--to make sure the software is free for all its users. This +General Public License applies to most of the Free Software +Foundation's software and to any other program whose authors commit to +using it. (Some other Free Software Foundation software is covered by +the GNU Lesser General Public License instead.) You can apply it to +your programs, too. + + When we speak of free software, we are referring to freedom, not +price. Our General Public Licenses are designed to make sure that you +have the freedom to distribute copies of free software (and charge for +this service if you wish), that you receive source code or can get it +if you want it, that you can change the software or use pieces of it +in new free programs; and that you know you can do these things. + + To protect your rights, we need to make restrictions that forbid +anyone to deny you these rights or to ask you to surrender the rights. 
+These restrictions translate to certain responsibilities for you if you +distribute copies of the software, or if you modify it. + + For example, if you distribute copies of such a program, whether +gratis or for a fee, you must give the recipients all the rights that +you have. You must make sure that they, too, receive or can get the +source code. And you must show them these terms so they know their +rights. + + We protect your rights with two steps: (1) copyright the software, and +(2) offer you this license which gives you legal permission to copy, +distribute and/or modify the software. + + Also, for each author's protection and ours, we want to make certain +that everyone understands that there is no warranty for this free +software. If the software is modified by someone else and passed on, we +want its recipients to know that what they have is not the original, so +that any problems introduced by others will not reflect on the original +authors' reputations. + + Finally, any free program is threatened constantly by software +patents. We wish to avoid the danger that redistributors of a free +program will individually obtain patent licenses, in effect making the +program proprietary. To prevent this, we have made it clear that any +patent must be licensed for everyone's free use or not licensed at all. + + The precise terms and conditions for copying, distribution and +modification follow. + + GNU GENERAL PUBLIC LICENSE + TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION + + 0. This License applies to any program or other work which contains +a notice placed by the copyright holder saying it may be distributed +under the terms of this General Public License. The "Program", below, +refers to any such program or work, and a "work based on the Program" +means either the Program or any derivative work under copyright law: +that is to say, a work containing the Program or a portion of it, +either verbatim or with modifications and/or translated into another +language. (Hereinafter, translation is included without limitation in +the term "modification".) Each licensee is addressed as "you". + +Activities other than copying, distribution and modification are not +covered by this License; they are outside its scope. The act of +running the Program is not restricted, and the output from the Program +is covered only if its contents constitute a work based on the +Program (independent of having been made by running the Program). +Whether that is true depends on what the Program does. + + 1. You may copy and distribute verbatim copies of the Program's +source code as you receive it, in any medium, provided that you +conspicuously and appropriately publish on each copy an appropriate +copyright notice and disclaimer of warranty; keep intact all the +notices that refer to this License and to the absence of any warranty; +and give any other recipients of the Program a copy of this License +along with the Program. + +You may charge a fee for the physical act of transferring a copy, and +you may at your option offer warranty protection in exchange for a fee. + + 2. You may modify your copy or copies of the Program or any portion +of it, thus forming a work based on the Program, and copy and +distribute such modifications or work under the terms of Section 1 +above, provided that you also meet all of these conditions: + + a) You must cause the modified files to carry prominent notices + stating that you changed the files and the date of any change. 
+ + b) You must cause any work that you distribute or publish, that in + whole or in part contains or is derived from the Program or any + part thereof, to be licensed as a whole at no charge to all third + parties under the terms of this License. + + c) If the modified program normally reads commands interactively + when run, you must cause it, when started running for such + interactive use in the most ordinary way, to print or display an + announcement including an appropriate copyright notice and a + notice that there is no warranty (or else, saying that you provide + a warranty) and that users may redistribute the program under + these conditions, and telling the user how to view a copy of this + License. (Exception: if the Program itself is interactive but + does not normally print such an announcement, your work based on + the Program is not required to print an announcement.) + +These requirements apply to the modified work as a whole. If +identifiable sections of that work are not derived from the Program, +and can be reasonably considered independent and separate works in +themselves, then this License, and its terms, do not apply to those +sections when you distribute them as separate works. But when you +distribute the same sections as part of a whole which is a work based +on the Program, the distribution of the whole must be on the terms of +this License, whose permissions for other licensees extend to the +entire whole, and thus to each and every part regardless of who wrote it. + +Thus, it is not the intent of this section to claim rights or contest +your rights to work written entirely by you; rather, the intent is to +exercise the right to control the distribution of derivative or +collective works based on the Program. + +In addition, mere aggregation of another work not based on the Program +with the Program (or with a work based on the Program) on a volume of +a storage or distribution medium does not bring the other work under +the scope of this License. + + 3. You may copy and distribute the Program (or a work based on it, +under Section 2) in object code or executable form under the terms of +Sections 1 and 2 above provided that you also do one of the following: + + a) Accompany it with the complete corresponding machine-readable + source code, which must be distributed under the terms of Sections + 1 and 2 above on a medium customarily used for software interchange; or, + + b) Accompany it with a written offer, valid for at least three + years, to give any third party, for a charge no more than your + cost of physically performing source distribution, a complete + machine-readable copy of the corresponding source code, to be + distributed under the terms of Sections 1 and 2 above on a medium + customarily used for software interchange; or, + + c) Accompany it with the information you received as to the offer + to distribute corresponding source code. (This alternative is + allowed only for noncommercial distribution and only if you + received the program in object code or executable form with such + an offer, in accord with Subsection b above.) + +The source code for a work means the preferred form of the work for +making modifications to it. For an executable work, complete source +code means all the source code for all modules it contains, plus any +associated interface definition files, plus the scripts used to +control compilation and installation of the executable. 
However, as a +special exception, the source code distributed need not include +anything that is normally distributed (in either source or binary +form) with the major components (compiler, kernel, and so on) of the +operating system on which the executable runs, unless that component +itself accompanies the executable. + +If distribution of executable or object code is made by offering +access to copy from a designated place, then offering equivalent +access to copy the source code from the same place counts as +distribution of the source code, even though third parties are not +compelled to copy the source along with the object code. + + 4. You may not copy, modify, sublicense, or distribute the Program +except as expressly provided under this License. Any attempt +otherwise to copy, modify, sublicense or distribute the Program is +void, and will automatically terminate your rights under this License. +However, parties who have received copies, or rights, from you under +this License will not have their licenses terminated so long as such +parties remain in full compliance. + + 5. You are not required to accept this License, since you have not +signed it. However, nothing else grants you permission to modify or +distribute the Program or its derivative works. These actions are +prohibited by law if you do not accept this License. Therefore, by +modifying or distributing the Program (or any work based on the +Program), you indicate your acceptance of this License to do so, and +all its terms and conditions for copying, distributing or modifying +the Program or works based on it. + + 6. Each time you redistribute the Program (or any work based on the +Program), the recipient automatically receives a license from the +original licensor to copy, distribute or modify the Program subject to +these terms and conditions. You may not impose any further +restrictions on the recipients' exercise of the rights granted herein. +You are not responsible for enforcing compliance by third parties to +this License. + + 7. If, as a consequence of a court judgment or allegation of patent +infringement or for any other reason (not limited to patent issues), +conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. If you cannot +distribute so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you +may not distribute the Program at all. For example, if a patent +license would not permit royalty-free redistribution of the Program by +all those who receive copies directly or indirectly through you, then +the only way you could satisfy both it and this License would be to +refrain entirely from distribution of the Program. + +If any portion of this section is held invalid or unenforceable under +any particular circumstance, the balance of the section is intended to +apply and the section as a whole is intended to apply in other +circumstances. + +It is not the purpose of this section to induce you to infringe any +patents or other property right claims or to contest validity of any +such claims; this section has the sole purpose of protecting the +integrity of the free software distribution system, which is +implemented by public license practices. 
Many people have made +generous contributions to the wide range of software distributed +through that system in reliance on consistent application of that +system; it is up to the author/donor to decide if he or she is willing +to distribute software through any other system and a licensee cannot +impose that choice. + +This section is intended to make thoroughly clear what is believed to +be a consequence of the rest of this License. + + 8. If the distribution and/or use of the Program is restricted in +certain countries either by patents or by copyrighted interfaces, the +original copyright holder who places the Program under this License +may add an explicit geographical distribution limitation excluding +those countries, so that distribution is permitted only in or among +countries not thus excluded. In such case, this License incorporates +the limitation as if written in the body of this License. + + 9. The Free Software Foundation may publish revised and/or new versions +of the General Public License from time to time. Such new versions will +be similar in spirit to the present version, but may differ in detail to +address new problems or concerns. + +Each version is given a distinguishing version number. If the Program +specifies a version number of this License which applies to it and "any +later version", you have the option of following the terms and conditions +either of that version or of any later version published by the Free +Software Foundation. If the Program does not specify a version number of +this License, you may choose any version ever published by the Free Software +Foundation. + + 10. If you wish to incorporate parts of the Program into other free +programs whose distribution conditions are different, write to the author +to ask for permission. For software which is copyrighted by the Free +Software Foundation, write to the Free Software Foundation; we sometimes +make exceptions for this. Our decision will be guided by the two goals +of preserving the free status of all derivatives of our free software and +of promoting the sharing and reuse of software generally. + + NO WARRANTY + + 11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY +FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN +OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES +PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED +OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS +TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE +PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, +REPAIR OR CORRECTION. + + 12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING +WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR +REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, +INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING +OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED +TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY +YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER +PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE +POSSIBILITY OF SUCH DAMAGES. 
+ + END OF TERMS AND CONDITIONS + + How to Apply These Terms to Your New Programs + + If you develop a new program, and you want it to be of the greatest +possible use to the public, the best way to achieve this is to make it +free software which everyone can redistribute and change under these terms. + + To do so, attach the following notices to the program. It is safest +to attach them to the start of each source file to most effectively +convey the exclusion of warranty; and each file should have at least +the "copyright" line and a pointer to where the full notice is found. + + + Copyright (C) + + This program is free software; you can redistribute it and/or modify + it under the terms of the GNU General Public License as published by + the Free Software Foundation; either version 2 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU General Public License for more details. + + You should have received a copy of the GNU General Public License along + with this program; if not, write to the Free Software Foundation, Inc., + 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. + +Also add information on how to contact you by electronic and paper mail. + +If the program is interactive, make it output a short notice like this +when it starts in an interactive mode: + + Gnomovision version 69, Copyright (C) year name of author + Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'. + This is free software, and you are welcome to redistribute it + under certain conditions; type `show c' for details. + +The hypothetical commands `show w' and `show c' should show the appropriate +parts of the General Public License. Of course, the commands you use may +be called something other than `show w' and `show c'; they could even be +mouse-clicks or menu items--whatever suits your program. + +You should also get your employer (if you work as a programmer) or your +school, if any, to sign a "copyright disclaimer" for the program, if +necessary. Here is a sample; alter the names: + + Yoyodyne, Inc., hereby disclaims all copyright interest in the program + `Gnomovision' (which makes passes at compilers) written by James Hacker. + + , 1 April 1989 + Ty Coon, President of Vice + +This General Public License does not permit incorporating your program into +proprietary programs. If your program is a subroutine library, you may +consider it more useful to permit linking proprietary applications with the +library. If this is what you want to do, use the GNU Lesser General +Public License instead of this License. diff --git a/list_devices.py b/list_devices.py index 294d17d..75f3820 100755 --- a/list_devices.py +++ b/list_devices.py @@ -2,13 +2,13 @@ # # python-v4l2capture # -# 2009, 2010 Fredrik Portstrom +# python-v4l2capture +# Python extension to capture video with video4linux2 # -# I, the copyright holder of this file, hereby release it into the -# public domain. This applies worldwide. In case this is not legally -# possible: I grant anyone the right to use this work for any -# purpose, without any conditions, unless such conditions are -# required by law. 
+# 2009, 2010, 2011 Fredrik Portstrom, released into the public domain +# 2011, Joakim Gebart +# 2013, Tim Sheerman-Chase +# See README for license import os import v4l2capture diff --git a/setup.py b/setup.py index 67e9cd9..f05492f 100755 --- a/setup.py +++ b/setup.py @@ -2,13 +2,13 @@ # # python-v4l2capture # -# 2009, 2010, 2011 Fredrik Portstrom +# python-v4l2capture +# Python extension to capture video with video4linux2 # -# I, the copyright holder of this file, hereby release it into the -# public domain. This applies worldwide. In case this is not legally -# possible: I grant anyone the right to use this work for any -# purpose, without any conditions, unless such conditions are -# required by law. +# 2009, 2010, 2011 Fredrik Portstrom, released into the public domain +# 2011, Joakim Gebart +# 2013, Tim Sheerman-Chase +# See README for license from distutils.core import Extension, setup setup( diff --git a/v4l2capture.cpp b/v4l2capture.cpp index 08b41c5..32afc5d 100644 --- a/v4l2capture.cpp +++ b/v4l2capture.cpp @@ -1,14 +1,10 @@ // python-v4l2capture // Python extension to capture video with video4linux2 // -// 2009, 2010, 2011 Fredrik Portstrom +// 2009, 2010, 2011 Fredrik Portstrom, released into the public domain +// 2011, Joakim Gebart // 2013, Tim Sheerman-Chase -// -// I, the copyright holder of this file, hereby release it into the -// public domain. This applies worldwide. In case this is not legally -// possible: I grant anyone the right to use this work for any -// purpose, without any conditions, unless such conditions are -// required by law. +// See README for license #define USE_LIBV4L From 74535a27d5d1498b602737ef97696c26cc528dcf Mon Sep 17 00:00:00 2001 From: Tim Sheerman-Chase Date: Sun, 20 Oct 2013 06:46:06 +0100 Subject: [PATCH 021/256] Problems reading frame --- v4l2capture.cpp | 59 ++++++++++++++++++++++++++++--------------------- 1 file changed, 34 insertions(+), 25 deletions(-) diff --git a/v4l2capture.cpp b/v4l2capture.cpp index 32afc5d..d7cf849 100644 --- a/v4l2capture.cpp +++ b/v4l2capture.cpp @@ -15,6 +15,7 @@ #include #include #include +#include #include #ifdef USE_LIBV4L @@ -811,32 +812,29 @@ class Device_manager_Worker_thread_args } }; - void ReadFrame() + int ReadFrame() { - /*if(!self->buffers) + std::map::iterator it = self->buffers->find(this->devName); + if(it == self->buffers->end()) { - ASSERT_OPEN; - PyErr_SetString(PyExc_ValueError, "Buffers have not been created"); - Py_RETURN_NONE; + throw std::runtime_error("Buffers have not been created"); + return 0; } struct v4l2_buffer buffer; buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; buffer.memory = V4L2_MEMORY_MMAP; - - if(my_ioctl(self->fd, VIDIOC_DQBUF, &buffer)) + int fd = (*self->fd)[this->devName]; + printf("a %d\n", fd); + if(my_ioctl(fd, VIDIOC_DQBUF, &buffer)) { - Py_RETURN_NONE; + throw std::runtime_error("VIDIOC_DQBUF failed"); + return 0; } - + printf("b\n"); #ifdef USE_LIBV4L - PyObject *result = PyString_FromStringAndSize( - (const char*)self->buffers[buffer.index].start, buffer.bytesused); + printf("Rx %d\n", buffer.bytesused); //self->buffers[buffer.index].start, buffer.bytesused - if(!result) - { - Py_RETURN_NONE; - } #else // Convert buffer from YUYV to RGB. 
// For the byte order, see: http://v4l2spec.bytesex.org/spec/r4339.htm @@ -846,7 +844,8 @@ class Device_manager_Worker_thread_args if(!result) { - Py_RETURN_NONE; + throw std::runtime_error("String convert failed"); + return 0; } char *rgb = PyString_AS_STRING(result); @@ -877,21 +876,24 @@ class Device_manager_Worker_thread_args } #undef CLAMP #endif - PyObject *out = result; - if(return_timestamp) + /*if(1) { out = PyTuple_New(4); PyTuple_SetItem(out, 0, result); PyTuple_SetItem(out, 1, PyInt_FromLong(buffer.timestamp.tv_sec)); PyTuple_SetItem(out, 2, PyInt_FromLong(buffer.timestamp.tv_usec)); PyTuple_SetItem(out, 3, PyInt_FromLong(buffer.sequence)); - } + }*/ - if(queue && my_ioctl(self->fd, VIDIOC_QBUF, &buffer)) + //Queue next frame read + if(my_ioctl(fd, VIDIOC_QBUF, &buffer)) { - Py_RETURN_NONE; - }*/ + throw std::runtime_error("VIDIOC_QBUF failed"); + return 0; + } + + return 1; } void Run() @@ -905,7 +907,14 @@ class Device_manager_Worker_thread_args while(running) { usleep(1000); - this->ReadFrame(); + try + { + this->ReadFrame(); + } + catch(std::exception) + { + + } pthread_mutex_lock(&this->lock); running = !this->stop; @@ -1073,7 +1082,7 @@ static PyObject *Device_manager_Start(Device_manager *self, PyObject *args) Py_RETURN_NONE; } - struct buffer *buffs = (struct buffer *)malloc(reqbuf.count * sizeof(struct buffer)); + struct buffer *buffs = new struct buffer [reqbuf.count]; (*self->buffers)[devarg] = buffs; if(!buffs) @@ -1232,7 +1241,7 @@ static PyObject *Device_manager_close(Device_manager *self, PyObject *args) { v4l2_munmap(buffers[i].start, buffers[i].length); } - free (buffers); + delete [] buffers; //Release memory self->buffers->erase(devarg); From b123b541be62fbbb492957e572f2ccc9dbf0174a Mon Sep 17 00:00:00 2001 From: Tim Sheerman-Chase Date: Sun, 20 Oct 2013 06:56:03 +0100 Subject: [PATCH 022/256] Test function works, does all code need to be in worker? --- v4l2capture.cpp | 101 +++++++++++++++++++++++++++++++++++++++++++++++- 1 file changed, 100 insertions(+), 1 deletion(-) diff --git a/v4l2capture.cpp b/v4l2capture.cpp index d7cf849..691c282 100644 --- a/v4l2capture.cpp +++ b/v4l2capture.cpp @@ -909,7 +909,7 @@ class Device_manager_Worker_thread_args usleep(1000); try { - this->ReadFrame(); + //this->ReadFrame(); } catch(std::exception) { @@ -1256,6 +1256,102 @@ static PyObject *Device_manager_close(Device_manager *self, PyObject *args) Py_RETURN_NONE; } +static PyObject *Test(Device_manager *self, PyObject *args) +{ + //Process arguments + const char *devarg = NULL; + if(PyTuple_Size(args) >= 1) + { + PyObject *pydevarg = PyTuple_GetItem(args, 0); + devarg = PyString_AsString(pydevarg); + } + else + { + devarg = "/dev/video0"; + } + + std::map::iterator it = self->buffers->find(devarg); + if(it == self->buffers->end()) + { + throw std::runtime_error("Buffers have not been created"); + Py_RETURN_NONE; + } + + struct v4l2_buffer buffer; + buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + buffer.memory = V4L2_MEMORY_MMAP; + int fd = (*self->fd)[devarg]; + printf("a %d\n", fd); + if(my_ioctl(fd, VIDIOC_DQBUF, &buffer)) + { + throw std::runtime_error("VIDIOC_DQBUF failed"); + Py_RETURN_NONE; + } + printf("b\n"); + #ifdef USE_LIBV4L + printf("Rx %d\n", buffer.bytesused); //self->buffers[buffer.index].start, buffer.bytesused + + #else + // Convert buffer from YUYV to RGB. 
+ // For the byte order, see: http://v4l2spec.bytesex.org/spec/r4339.htm + // For the color conversion, see: http://v4l2spec.bytesex.org/spec/x2123.htm + int length = buffer.bytesused * 6 / 4; + PyObject *result = PyString_FromStringAndSize(NULL, length); + + if(!result) + { + throw std::runtime_error("String convert failed"); + Py_RETURN_NONE; + } + + char *rgb = PyString_AS_STRING(result); + char *rgb_max = rgb + length; + unsigned char *yuyv = self->buffers[buffer.index].start; + + #define CLAMP(c) ((c) <= 0 ? 0 : (c) >= 65025 ? 255 : (c) >> 8) + while(rgb < rgb_max) + { + int u = yuyv[1] - 128; + int v = yuyv[3] - 128; + int uv = 100 * u + 208 * v; + u *= 516; + v *= 409; + + int y = 298 * (yuyv[0] - 16); + rgb[0] = CLAMP(y + v); + rgb[1] = CLAMP(y - uv); + rgb[2] = CLAMP(y + u); + + y = 298 * (yuyv[2] - 16); + rgb[3] = CLAMP(y + v); + rgb[4] = CLAMP(y - uv); + rgb[5] = CLAMP(y + u); + + rgb += 6; + yuyv += 4; + } + #undef CLAMP + #endif + + /*if(1) + { + out = PyTuple_New(4); + PyTuple_SetItem(out, 0, result); + PyTuple_SetItem(out, 1, PyInt_FromLong(buffer.timestamp.tv_sec)); + PyTuple_SetItem(out, 2, PyInt_FromLong(buffer.timestamp.tv_usec)); + PyTuple_SetItem(out, 3, PyInt_FromLong(buffer.sequence)); + }*/ + + //Queue next frame read + if(my_ioctl(fd, VIDIOC_QBUF, &buffer)) + { + throw std::runtime_error("VIDIOC_QBUF failed"); + Py_RETURN_NONE; + } + + Py_RETURN_NONE; +} + // ********************************************************************* static PyMethodDef Video_device_methods[] = { @@ -1337,6 +1433,9 @@ static PyMethodDef Device_manager_methods[] = { {"close", (PyCFunction)Device_manager_close, METH_VARARGS, "close(dev = '\\dev\\video0')\n\n" "Close video device. Subsequent calls to other methods will fail."}, + {"test", (PyCFunction)Test, METH_VARARGS, + "test(dev = '\\dev\\video0')\n\n" + "testfunc."}, {NULL} }; From 6450b12b8208e3dc43eb1ff9b70c1de6828d4d43 Mon Sep 17 00:00:00 2001 From: Tim Sheerman-Chase Date: Sun, 20 Oct 2013 07:10:21 +0100 Subject: [PATCH 023/256] Moved functionality to worker but did not help --- v4l2capture.cpp | 340 +++++++++++++++++++----------------------------- 1 file changed, 134 insertions(+), 206 deletions(-) diff --git a/v4l2capture.cpp b/v4l2capture.cpp index 691c282..c516757 100644 --- a/v4l2capture.cpp +++ b/v4l2capture.cpp @@ -15,6 +15,7 @@ #include #include #include +#include #include #include @@ -779,6 +780,7 @@ class Device_manager_Worker_thread_args int stop; int stopped; pthread_mutex_t lock; + std::vector startDeviceFlag; Device_manager_Worker_thread_args() { @@ -812,6 +814,13 @@ class Device_manager_Worker_thread_args } }; + void StartDevice(const char *devName) + { + pthread_mutex_lock(&this->lock); + this->startDeviceFlag.push_back(devName); + pthread_mutex_unlock(&this->lock); + }; + int ReadFrame() { std::map::iterator it = self->buffers->find(this->devName); @@ -896,6 +905,118 @@ class Device_manager_Worker_thread_args return 1; } + int StartDeviceInternal(const char *devarg) + { + //Check this device has not already been start + std::map::iterator it = self->fd->find(devarg); + if(it==self->fd->end()) + { + PyErr_Format(PyExc_RuntimeError, "Device not open."); + return 0; + } + + int fd = (*self->fd)[devarg]; + + //Set other parameters for capture + //TODO + + /* + //Suggest an image size to the device. The device may choose and + //return another size if it doesn't support the suggested one. 
+ self.video.set_format(reqSize[0], reqSize[1], fmt) + + //Query current pixel format + self.size_x, self.size_y, self.pixelFmt = self.video.get_format() + + //Set target frames per second + self.fps = self.video.set_fps(reqFps) + */ + + // Create a buffer to store image data in. This must be done before + // calling 'start' if v4l2capture is compiled with libv4l2. Otherwise + // raises IOError. + + int buffer_count = 10; + struct v4l2_requestbuffers reqbuf; + reqbuf.count = buffer_count; + reqbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + reqbuf.memory = V4L2_MEMORY_MMAP; + + if(my_ioctl(fd, VIDIOC_REQBUFS, &reqbuf)) + { + PyErr_SetString(PyExc_IOError, "VIDIOC_REQBUFS failed"); + return 0; + } + + if(!reqbuf.count) + { + PyErr_SetString(PyExc_IOError, "Not enough buffer memory"); + return 0; + } + + struct buffer *buffs = new struct buffer [reqbuf.count]; + (*self->buffers)[devarg] = buffs; + + if(!buffs) + { + PyErr_NoMemory(); + return 0; + } + + for(unsigned int i = 0; i < reqbuf.count; i++) + { + struct v4l2_buffer buffer; + buffer.index = i; + buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + buffer.memory = V4L2_MEMORY_MMAP; + + if(my_ioctl(fd, VIDIOC_QUERYBUF, &buffer)) + { + return 0; + } + + buffs[i].length = buffer.length; + buffs[i].start = v4l2_mmap(NULL, buffer.length, + PROT_READ | PROT_WRITE, MAP_SHARED, fd, buffer.m.offset); + + if(buffs[i].start == MAP_FAILED) + { + PyErr_SetFromErrno(PyExc_IOError); + return 0; + } + } + + (*self->buffer_counts)[devarg] = reqbuf.count; + buffer_count = reqbuf.count; + + // Send the buffer to the device. Some devices require this to be done + // before calling 'start'. + + for(int i = 0; i < buffer_count; i++) + { + struct v4l2_buffer buffer; + buffer.index = i; + buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + buffer.memory = V4L2_MEMORY_MMAP; + + if(my_ioctl(fd, VIDIOC_QBUF, &buffer)) + { + return 0; + } + } + + // Start the device. This lights the LED if it's a camera that has one. + enum v4l2_buf_type type; + type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + + if(my_ioctl(fd, VIDIOC_STREAMON, &type)) + { + return 0; + } + + return 1; + } + void Run() { printf("Thread started\n"); @@ -909,13 +1030,23 @@ class Device_manager_Worker_thread_args usleep(1000); try { - //this->ReadFrame(); + this->ReadFrame(); } catch(std::exception) { } + pthread_mutex_lock(&this->lock); + if(this->startDeviceFlag.size() > 0) + { + std::string devName = this->startDeviceFlag[this->startDeviceFlag.size()-1]; + this->startDeviceFlag.pop_back(); + this->StartDeviceInternal(devName.c_str()); + } + + pthread_mutex_unlock(&this->lock); + pthread_mutex_lock(&this->lock); running = !this->stop; pthread_mutex_unlock(&this->lock); @@ -1036,118 +1167,14 @@ static PyObject *Device_manager_Start(Device_manager *self, PyObject *args) buffer_count = PyInt_AsLong(pybufferarg); } - //Check this device has not already been start - std::map::iterator it = self->fd->find(devarg); - if(it==self->fd->end()) - { - PyErr_Format(PyExc_RuntimeError, "Device not open."); - Py_RETURN_NONE; - } - - int fd = (*self->fd)[devarg]; - - //Set other parameters for capture - //TODO - - /* - //Suggest an image size to the device. The device may choose and - //return another size if it doesn't support the suggested one. - self.video.set_format(reqSize[0], reqSize[1], fmt) - - //Query current pixel format - self.size_x, self.size_y, self.pixelFmt = self.video.get_format() - - //Set target frames per second - self.fps = self.video.set_fps(reqFps) - */ - - // Create a buffer to store image data in. 
This must be done before - // calling 'start' if v4l2capture is compiled with libv4l2. Otherwise - // raises IOError. - - struct v4l2_requestbuffers reqbuf; - reqbuf.count = buffer_count; - reqbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; - reqbuf.memory = V4L2_MEMORY_MMAP; - - if(my_ioctl(fd, VIDIOC_REQBUFS, &reqbuf)) - { - PyErr_SetString(PyExc_IOError, "VIDIOC_REQBUFS failed"); - Py_RETURN_NONE; - } - - if(!reqbuf.count) - { - PyErr_SetString(PyExc_IOError, "Not enough buffer memory"); - Py_RETURN_NONE; - } - - struct buffer *buffs = new struct buffer [reqbuf.count]; - (*self->buffers)[devarg] = buffs; - - if(!buffs) - { - PyErr_NoMemory(); - Py_RETURN_NONE; - } - - for(unsigned int i = 0; i < reqbuf.count; i++) - { - struct v4l2_buffer buffer; - buffer.index = i; - buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; - buffer.memory = V4L2_MEMORY_MMAP; - - if(my_ioctl(fd, VIDIOC_QUERYBUF, &buffer)) - { - Py_RETURN_NONE; - } - - buffs[i].length = buffer.length; - buffs[i].start = v4l2_mmap(NULL, buffer.length, - PROT_READ | PROT_WRITE, MAP_SHARED, fd, buffer.m.offset); - - if(buffs[i].start == MAP_FAILED) - { - PyErr_SetFromErrno(PyExc_IOError); - Py_RETURN_NONE; - } - } - - (*self->buffer_counts)[devarg] = reqbuf.count; - buffer_count = reqbuf.count; - - // Send the buffer to the device. Some devices require this to be done - // before calling 'start'. - - for(int i = 0; i < buffer_count; i++) - { - struct v4l2_buffer buffer; - buffer.index = i; - buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; - buffer.memory = V4L2_MEMORY_MMAP; - - if(my_ioctl(fd, VIDIOC_QBUF, &buffer)) - { - Py_RETURN_NONE; - } - } - - // Start the device. This lights the LED if it's a camera that has one. - enum v4l2_buf_type type; - type = V4L2_BUF_TYPE_VIDEO_CAPTURE; - - if(my_ioctl(fd, VIDIOC_STREAMON, &type)) - { - Py_RETURN_NONE; - } - pthread_t thread; Device_manager_Worker_thread_args *threadArgs = new Device_manager_Worker_thread_args; (*self->threadArgStore)[devarg] = threadArgs; threadArgs->self = self; threadArgs->devName = devarg; pthread_create(&thread, NULL, Device_manager_Worker_thread, threadArgs); + + threadArgs->StartDevice(devarg); Py_RETURN_NONE; } @@ -1256,102 +1283,6 @@ static PyObject *Device_manager_close(Device_manager *self, PyObject *args) Py_RETURN_NONE; } -static PyObject *Test(Device_manager *self, PyObject *args) -{ - //Process arguments - const char *devarg = NULL; - if(PyTuple_Size(args) >= 1) - { - PyObject *pydevarg = PyTuple_GetItem(args, 0); - devarg = PyString_AsString(pydevarg); - } - else - { - devarg = "/dev/video0"; - } - - std::map::iterator it = self->buffers->find(devarg); - if(it == self->buffers->end()) - { - throw std::runtime_error("Buffers have not been created"); - Py_RETURN_NONE; - } - - struct v4l2_buffer buffer; - buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; - buffer.memory = V4L2_MEMORY_MMAP; - int fd = (*self->fd)[devarg]; - printf("a %d\n", fd); - if(my_ioctl(fd, VIDIOC_DQBUF, &buffer)) - { - throw std::runtime_error("VIDIOC_DQBUF failed"); - Py_RETURN_NONE; - } - printf("b\n"); - #ifdef USE_LIBV4L - printf("Rx %d\n", buffer.bytesused); //self->buffers[buffer.index].start, buffer.bytesused - - #else - // Convert buffer from YUYV to RGB. 
- // For the byte order, see: http://v4l2spec.bytesex.org/spec/r4339.htm - // For the color conversion, see: http://v4l2spec.bytesex.org/spec/x2123.htm - int length = buffer.bytesused * 6 / 4; - PyObject *result = PyString_FromStringAndSize(NULL, length); - - if(!result) - { - throw std::runtime_error("String convert failed"); - Py_RETURN_NONE; - } - - char *rgb = PyString_AS_STRING(result); - char *rgb_max = rgb + length; - unsigned char *yuyv = self->buffers[buffer.index].start; - - #define CLAMP(c) ((c) <= 0 ? 0 : (c) >= 65025 ? 255 : (c) >> 8) - while(rgb < rgb_max) - { - int u = yuyv[1] - 128; - int v = yuyv[3] - 128; - int uv = 100 * u + 208 * v; - u *= 516; - v *= 409; - - int y = 298 * (yuyv[0] - 16); - rgb[0] = CLAMP(y + v); - rgb[1] = CLAMP(y - uv); - rgb[2] = CLAMP(y + u); - - y = 298 * (yuyv[2] - 16); - rgb[3] = CLAMP(y + v); - rgb[4] = CLAMP(y - uv); - rgb[5] = CLAMP(y + u); - - rgb += 6; - yuyv += 4; - } - #undef CLAMP - #endif - - /*if(1) - { - out = PyTuple_New(4); - PyTuple_SetItem(out, 0, result); - PyTuple_SetItem(out, 1, PyInt_FromLong(buffer.timestamp.tv_sec)); - PyTuple_SetItem(out, 2, PyInt_FromLong(buffer.timestamp.tv_usec)); - PyTuple_SetItem(out, 3, PyInt_FromLong(buffer.sequence)); - }*/ - - //Queue next frame read - if(my_ioctl(fd, VIDIOC_QBUF, &buffer)) - { - throw std::runtime_error("VIDIOC_QBUF failed"); - Py_RETURN_NONE; - } - - Py_RETURN_NONE; -} - // ********************************************************************* static PyMethodDef Video_device_methods[] = { @@ -1433,9 +1364,6 @@ static PyMethodDef Device_manager_methods[] = { {"close", (PyCFunction)Device_manager_close, METH_VARARGS, "close(dev = '\\dev\\video0')\n\n" "Close video device. Subsequent calls to other methods will fail."}, - {"test", (PyCFunction)Test, METH_VARARGS, - "test(dev = '\\dev\\video0')\n\n" - "testfunc."}, {NULL} }; From c93f4557edb95d0adbc9f7f3c11d072848f07173 Mon Sep 17 00:00:00 2001 From: Tim Sheerman-Chase Date: Sun, 20 Oct 2013 07:29:45 +0100 Subject: [PATCH 024/256] Moved functionality to worker but did not help --- v4l2capture.cpp | 90 +++++++++++++++++++++++++++++++------------------ 1 file changed, 57 insertions(+), 33 deletions(-) diff --git a/v4l2capture.cpp b/v4l2capture.cpp index c516757..dd5f930 100644 --- a/v4l2capture.cpp +++ b/v4l2capture.cpp @@ -780,12 +780,16 @@ class Device_manager_Worker_thread_args int stop; int stopped; pthread_mutex_t lock; + std::vector openDeviceFlag; std::vector startDeviceFlag; + int deviceStarted; - Device_manager_Worker_thread_args() + Device_manager_Worker_thread_args(const char *devNameIn) { stop = 0; stopped = 1; + deviceStarted = 0; + this->devName = devNameIn; pthread_mutex_init(&lock, NULL); }; @@ -814,10 +818,17 @@ class Device_manager_Worker_thread_args } }; - void StartDevice(const char *devName) + void OpenDevice() { pthread_mutex_lock(&this->lock); - this->startDeviceFlag.push_back(devName); + this->openDeviceFlag.push_back(this->devName.c_str()); + pthread_mutex_unlock(&this->lock); + }; + + void StartDevice() + { + pthread_mutex_lock(&this->lock); + this->startDeviceFlag.push_back(this->devName.c_str()); pthread_mutex_unlock(&this->lock); }; @@ -905,6 +916,23 @@ class Device_manager_Worker_thread_args return 1; } + int OpenDeviceInternal(const char *devarg) + { + //Open the video device. 
+ int fd = v4l2_open(devarg, O_RDWR | O_NONBLOCK); + + if(fd < 0) + { + PyErr_SetFromErrnoWithFilename(PyExc_IOError, devarg); + return 0; + } + + (*self->fd)[devarg] = fd; + (*self->buffers)[devarg] = NULL; + this->deviceStarted = 0; + return 1; + } + int StartDeviceInternal(const char *devarg) { //Check this device has not already been start @@ -1014,6 +1042,7 @@ class Device_manager_Worker_thread_args return 0; } + this->deviceStarted = 1; return 1; } @@ -1029,8 +1058,8 @@ class Device_manager_Worker_thread_args { usleep(1000); try - { - this->ReadFrame(); + { + if(deviceStarted) this->ReadFrame(); } catch(std::exception) { @@ -1044,7 +1073,15 @@ class Device_manager_Worker_thread_args this->startDeviceFlag.pop_back(); this->StartDeviceInternal(devName.c_str()); } + pthread_mutex_unlock(&this->lock); + pthread_mutex_lock(&this->lock); + if(this->openDeviceFlag.size() > 0) + { + std::string devName = this->openDeviceFlag[this->openDeviceFlag.size()-1]; + this->openDeviceFlag.pop_back(); + this->OpenDeviceInternal(devName.c_str()); + } pthread_mutex_unlock(&this->lock); pthread_mutex_lock(&this->lock); @@ -1129,17 +1166,13 @@ static PyObject *Device_manager_open(Device_manager *self, PyObject *args) Py_RETURN_NONE; } - //Open the video device. - int fd = v4l2_open(devarg, O_RDWR | O_NONBLOCK); - - if(fd < 0) - { - PyErr_SetFromErrnoWithFilename(PyExc_IOError, devarg); - Py_RETURN_NONE; - } + pthread_t thread; + Device_manager_Worker_thread_args *threadArgs = new Device_manager_Worker_thread_args(devarg); + (*self->threadArgStore)[devarg] = threadArgs; + threadArgs->self = self; + pthread_create(&thread, NULL, Device_manager_Worker_thread, threadArgs); - (*self->fd)[devarg] = fd; - (*self->buffers)[devarg] = NULL; + threadArgs->OpenDevice(); Py_RETURN_NONE; } @@ -1167,14 +1200,8 @@ static PyObject *Device_manager_Start(Device_manager *self, PyObject *args) buffer_count = PyInt_AsLong(pybufferarg); } - pthread_t thread; - Device_manager_Worker_thread_args *threadArgs = new Device_manager_Worker_thread_args; - (*self->threadArgStore)[devarg] = threadArgs; - threadArgs->self = self; - threadArgs->devName = devarg; - pthread_create(&thread, NULL, Device_manager_Worker_thread, threadArgs); - - threadArgs->StartDevice(devarg); + class Device_manager_Worker_thread_args *threadArgs = (*self->threadArgStore)[devarg]; + threadArgs->StartDevice(); Py_RETURN_NONE; } @@ -1207,9 +1234,6 @@ static PyObject *Device_manager_stop(Device_manager *self, PyObject *args) Py_RETURN_NONE; } - //Stop worker thread - (*self->threadArgStore)[devarg]->Stop(); - //Signal V4l2 api enum v4l2_buf_type type; type = V4L2_BUF_TYPE_VIDEO_CAPTURE; @@ -1219,11 +1243,6 @@ static PyObject *Device_manager_stop(Device_manager *self, PyObject *args) Py_RETURN_NONE; } - //Release memeory - (*self->threadArgStore)[devarg]->WaitForStop(); - delete (*self->threadArgStore)[devarg]; - self->threadArgStore->erase(devarg); - Py_RETURN_NONE; } @@ -1245,8 +1264,13 @@ static PyObject *Device_manager_close(Device_manager *self, PyObject *args) std::map::iterator it3 = self->threadArgStore->find(devarg); if(it3 != self->threadArgStore->end()) { - //Stop thread that is still running - Device_manager_stop(self, args); + //Stop worker thread + (*self->threadArgStore)[devarg]->Stop(); + + //Release memeory + (*self->threadArgStore)[devarg]->WaitForStop(); + delete (*self->threadArgStore)[devarg]; + self->threadArgStore->erase(devarg); } std::map::iterator it = self->fd->find(devarg); From 83b5afed4826a13ff28e3a332ddfaad94e6e4001 Mon Sep 17 00:00:00 
2001 From: Tim Sheerman-Chase Date: Sun, 20 Oct 2013 08:05:08 +0100 Subject: [PATCH 025/256] Moved almost everything to oop design but errors still occur --- v4l2capture.cpp | 254 ++++++++++++++++++++++++------------------------ 1 file changed, 125 insertions(+), 129 deletions(-) diff --git a/v4l2capture.cpp b/v4l2capture.cpp index dd5f930..a466a7e 100644 --- a/v4l2capture.cpp +++ b/v4l2capture.cpp @@ -52,9 +52,6 @@ typedef struct { class Device_manager_cl{ public: PyObject_HEAD - std::map *fd; - std::map *buffers; - std::map *buffer_counts; std::map *threadArgStore; }; typedef Device_manager_cl Device_manager; @@ -782,7 +779,12 @@ class Device_manager_Worker_thread_args pthread_mutex_t lock; std::vector openDeviceFlag; std::vector startDeviceFlag; + int stopDeviceFlag; + int closeDeviceFlag; int deviceStarted; + int fd; + struct buffer *buffers; + int buffer_counts; Device_manager_Worker_thread_args(const char *devNameIn) { @@ -791,10 +793,27 @@ class Device_manager_Worker_thread_args deviceStarted = 0; this->devName = devNameIn; pthread_mutex_init(&lock, NULL); + buffer_counts = 10; + buffers = NULL; + stopDeviceFlag = 0; + closeDeviceFlag = 0; }; virtual ~Device_manager_Worker_thread_args() { + if(deviceStarted) + { + this->StopDeviceInternal(); + } + + if(fd!=-1) + { + this->CloseDeviceInternal(); + } + + if(buffers) delete [] buffers; + this->buffers = NULL; + pthread_mutex_destroy(&lock); }; @@ -832,10 +851,23 @@ class Device_manager_Worker_thread_args pthread_mutex_unlock(&this->lock); }; + void StopDevice() + { + pthread_mutex_lock(&this->lock); + this->stopDeviceFlag = 1; + pthread_mutex_unlock(&this->lock); + }; + + void CloseDevice() + { + pthread_mutex_lock(&this->lock); + this->closeDeviceFlag = 1; + pthread_mutex_unlock(&this->lock); + }; + int ReadFrame() { - std::map::iterator it = self->buffers->find(this->devName); - if(it == self->buffers->end()) + if(this->buffers == NULL) { throw std::runtime_error("Buffers have not been created"); return 0; @@ -844,8 +876,7 @@ class Device_manager_Worker_thread_args struct v4l2_buffer buffer; buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; buffer.memory = V4L2_MEMORY_MMAP; - int fd = (*self->fd)[this->devName]; - printf("a %d\n", fd); + printf("a %d\n", this->fd); if(my_ioctl(fd, VIDIOC_DQBUF, &buffer)) { throw std::runtime_error("VIDIOC_DQBUF failed"); @@ -916,35 +947,32 @@ class Device_manager_Worker_thread_args return 1; } - int OpenDeviceInternal(const char *devarg) + int OpenDeviceInternal() { + printf("OpenDeviceInternal\n"); //Open the video device. 
- int fd = v4l2_open(devarg, O_RDWR | O_NONBLOCK); + this->fd = v4l2_open(this->devName.c_str(), O_RDWR | O_NONBLOCK); if(fd < 0) { - PyErr_SetFromErrnoWithFilename(PyExc_IOError, devarg); + PyErr_SetFromErrnoWithFilename(PyExc_IOError, this->devName.c_str()); return 0; } - (*self->fd)[devarg] = fd; - (*self->buffers)[devarg] = NULL; this->deviceStarted = 0; return 1; } - int StartDeviceInternal(const char *devarg) + int StartDeviceInternal() { + printf("StartDeviceInternal\n"); //Check this device has not already been start - std::map::iterator it = self->fd->find(devarg); - if(it==self->fd->end()) + if(this->fd==-1) { PyErr_Format(PyExc_RuntimeError, "Device not open."); return 0; } - int fd = (*self->fd)[devarg]; - //Set other parameters for capture //TODO @@ -970,7 +998,7 @@ class Device_manager_Worker_thread_args reqbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; reqbuf.memory = V4L2_MEMORY_MMAP; - if(my_ioctl(fd, VIDIOC_REQBUFS, &reqbuf)) + if(my_ioctl(this->fd, VIDIOC_REQBUFS, &reqbuf)) { PyErr_SetString(PyExc_IOError, "VIDIOC_REQBUFS failed"); return 0; @@ -982,10 +1010,9 @@ class Device_manager_Worker_thread_args return 0; } - struct buffer *buffs = new struct buffer [reqbuf.count]; - (*self->buffers)[devarg] = buffs; + this->buffers = new struct buffer [reqbuf.count]; - if(!buffs) + if(this->buffers == NULL) { PyErr_NoMemory(); return 0; @@ -1003,19 +1030,18 @@ class Device_manager_Worker_thread_args return 0; } - buffs[i].length = buffer.length; - buffs[i].start = v4l2_mmap(NULL, buffer.length, + this->buffers[i].length = buffer.length; + this->buffers[i].start = v4l2_mmap(NULL, buffer.length, PROT_READ | PROT_WRITE, MAP_SHARED, fd, buffer.m.offset); - if(buffs[i].start == MAP_FAILED) + if(this->buffers[i].start == MAP_FAILED) { PyErr_SetFromErrno(PyExc_IOError); return 0; } } - (*self->buffer_counts)[devarg] = reqbuf.count; - buffer_count = reqbuf.count; + this->buffer_counts = reqbuf.count; // Send the buffer to the device. Some devices require this to be done // before calling 'start'. 
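
The hunk above moves the V4L2 buffer negotiation into the worker's StartDeviceInternal, but the call order is easy to lose across the fragmented diff context. The condensed sketch below shows the same request-buffers / query / mmap / queue / stream-on sequence on a bare file descriptor. It is illustrative only: start_capture and cap_buffer are invented names, plain ioctl/mmap stand in for the libv4l wrappers the patch uses, the two passes over the buffers are folded into one loop, and the v4l2 structures are zero-initialised before use (the API expects reserved fields to be zero), which the patch itself does not do.

    #include <string.h>
    #include <sys/ioctl.h>
    #include <sys/mman.h>
    #include <linux/videodev2.h>

    struct cap_buffer { void *start; size_t length; };

    /* Minimal sketch of the capture start sequence: request mmap buffers,
       map each one, queue it, then start streaming. Assumes fd is an open
       V4L2 capture device and bufs has room for buffer_count entries. */
    static int start_capture(int fd, unsigned buffer_count, struct cap_buffer *bufs)
    {
        struct v4l2_requestbuffers reqbuf;
        memset(&reqbuf, 0, sizeof(reqbuf));
        reqbuf.count = buffer_count;
        reqbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        reqbuf.memory = V4L2_MEMORY_MMAP;
        if (ioctl(fd, VIDIOC_REQBUFS, &reqbuf) < 0 || reqbuf.count == 0)
            return -1;

        for (unsigned i = 0; i < reqbuf.count; i++) {
            struct v4l2_buffer buffer;
            memset(&buffer, 0, sizeof(buffer));
            buffer.index = i;
            buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
            buffer.memory = V4L2_MEMORY_MMAP;
            if (ioctl(fd, VIDIOC_QUERYBUF, &buffer) < 0)
                return -1;

            /* Map the driver-allocated buffer into our address space. */
            bufs[i].length = buffer.length;
            bufs[i].start = mmap(NULL, buffer.length, PROT_READ | PROT_WRITE,
                                 MAP_SHARED, fd, buffer.m.offset);
            if (bufs[i].start == MAP_FAILED)
                return -1;

            /* Hand the empty buffer to the driver so it can be filled. */
            if (ioctl(fd, VIDIOC_QBUF, &buffer) < 0)
                return -1;
        }

        /* Start streaming; on cameras with an LED this is when it lights. */
        enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        if (ioctl(fd, VIDIOC_STREAMON, &type) < 0)
            return -1;

        return (int)reqbuf.count;
    }

A caller would keep the returned count for the later munmap/VIDIOC_STREAMOFF teardown, mirroring the StopDeviceInternal and CloseDeviceInternal methods introduced in the next hunk of this patch.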
@@ -1046,6 +1072,45 @@ class Device_manager_Worker_thread_args return 1; } + void StopDeviceInternal() + { + if(this->fd==-1) + { + PyErr_Format(PyExc_RuntimeError, "Device not started."); + return; + } + + //Signal V4l2 api + enum v4l2_buf_type type; + type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + + if(my_ioctl(this->fd, VIDIOC_STREAMOFF, &type)) + { + return; + } + } + + int CloseDeviceInternal() + { + if(this->fd == -1) + { + PyErr_Format(PyExc_RuntimeError, "Device not started."); + return 0; + } + + for(int i = 0; i < this->buffer_counts; i++) + { + v4l2_munmap(buffers[i].start, buffers[i].length); + } + delete [] this->buffers; + this->buffers = NULL; + + //Release memory + v4l2_close(fd); + fd = -1; + return 1; + } + void Run() { printf("Thread started\n"); @@ -1071,7 +1136,7 @@ class Device_manager_Worker_thread_args { std::string devName = this->startDeviceFlag[this->startDeviceFlag.size()-1]; this->startDeviceFlag.pop_back(); - this->StartDeviceInternal(devName.c_str()); + this->StartDeviceInternal(); } pthread_mutex_unlock(&this->lock); @@ -1080,7 +1145,23 @@ class Device_manager_Worker_thread_args { std::string devName = this->openDeviceFlag[this->openDeviceFlag.size()-1]; this->openDeviceFlag.pop_back(); - this->OpenDeviceInternal(devName.c_str()); + this->OpenDeviceInternal(); + } + pthread_mutex_unlock(&this->lock); + + pthread_mutex_lock(&this->lock); + if(this->stopDeviceFlag) + { + this->StopDeviceInternal(); + this->stopDeviceFlag = 0; + } + pthread_mutex_unlock(&this->lock); + + pthread_mutex_lock(&this->lock); + if(this->closeDeviceFlag) + { + this->CloseDeviceInternal(); + this->closeDeviceFlag = 0; } pthread_mutex_unlock(&this->lock); @@ -1117,19 +1198,6 @@ static void Device_manager_dealloc(Device_manager *self) Py_DECREF(args); } - //Close devices - for(std::map::iterator it = self->fd->begin(); - it != self->fd->end(); it++) - { - PyObject *args = PyTuple_New(1); - PyTuple_SetItem(args, 0, PyString_FromString(it->first.c_str())); - Device_manager_close(self, args); - Py_DECREF(args); - } - - delete self->fd; - delete self->buffers; - delete self->buffer_counts; delete self->threadArgStore; self->ob_type->tp_free((PyObject *)self); } @@ -1137,9 +1205,6 @@ static void Device_manager_dealloc(Device_manager *self) static int Device_manager_init(Device_manager *self, PyObject *args, PyObject *kwargs) { - self->fd = new std::map; - self->buffers = new std::map; - self->buffer_counts = new std::map; self->threadArgStore = new std::map; return 0; } @@ -1147,20 +1212,16 @@ static int Device_manager_init(Device_manager *self, PyObject *args, static PyObject *Device_manager_open(Device_manager *self, PyObject *args) { //Process arguments - const char *devarg = NULL; + const char *devarg = "/dev/video0"; if(PyTuple_Size(args) >= 1) { PyObject *pydevarg = PyTuple_GetItem(args, 0); devarg = PyString_AsString(pydevarg); } - else - { - devarg = "/dev/video0"; - } //Check this device has not already been opened - std::map::iterator it = self->fd->find(devarg); - if(it!=self->fd->end()) + std::map::iterator it = self->threadArgStore->find(devarg); + if(it!=self->threadArgStore->end()) { PyErr_Format(PyExc_RuntimeError, "Device already opened."); Py_RETURN_NONE; @@ -1182,16 +1243,12 @@ static PyObject *Device_manager_Start(Device_manager *self, PyObject *args) { //Process arguments - const char *devarg = NULL; + const char *devarg = "/dev/video0"; if(PyTuple_Size(args) >= 1) { PyObject *pydevarg = PyTuple_GetItem(args, 0); devarg = PyString_AsString(pydevarg); } - else - { - devarg = 
"/dev/video0"; - } long buffer_count = 10; if(PyTuple_Size(args) >= 4) @@ -1209,39 +1266,15 @@ static PyObject *Device_manager_Start(Device_manager *self, PyObject *args) static PyObject *Device_manager_stop(Device_manager *self, PyObject *args) { //Process arguments - - const char *devarg = NULL; + const char *devarg = "/dev/video0"; if(PyTuple_Size(args) >= 1) { PyObject *pydevarg = PyTuple_GetItem(args, 0); devarg = PyString_AsString(pydevarg); } - else - { - devarg = "/dev/video0"; - } - - std::map::iterator it = self->fd->find(devarg); - if(it==self->fd->end()) - { - PyErr_Format(PyExc_RuntimeError, "Device not started."); - Py_RETURN_NONE; - } - if((*self->fd)[devarg] < 0) - { - PyErr_SetString(PyExc_ValueError, "I/O operation on closed file"); - Py_RETURN_NONE; - } - - //Signal V4l2 api - enum v4l2_buf_type type; - type = V4L2_BUF_TYPE_VIDEO_CAPTURE; - - if(my_ioctl((*self->fd)[devarg], VIDIOC_STREAMOFF, &type)) - { - Py_RETURN_NONE; - } + class Device_manager_Worker_thread_args *threadArgs = (*self->threadArgStore)[devarg]; + threadArgs->StopDevice(); Py_RETURN_NONE; } @@ -1249,60 +1282,23 @@ static PyObject *Device_manager_stop(Device_manager *self, PyObject *args) static PyObject *Device_manager_close(Device_manager *self, PyObject *args) { //Process arguments - const char *devarg = NULL; + const char *devarg = "/dev/video0"; if(PyTuple_Size(args) >= 1) { PyObject *pydevarg = PyTuple_GetItem(args, 0); devarg = PyString_AsString(pydevarg); } - else - { - devarg = "/dev/video0"; - } - //Check if thread is still running - std::map::iterator it3 = self->threadArgStore->find(devarg); - if(it3 != self->threadArgStore->end()) - { - //Stop worker thread - (*self->threadArgStore)[devarg]->Stop(); - - //Release memeory - (*self->threadArgStore)[devarg]->WaitForStop(); - delete (*self->threadArgStore)[devarg]; - self->threadArgStore->erase(devarg); - } - - std::map::iterator it = self->fd->find(devarg); - if(it==self->fd->end()) - { - PyErr_Format(PyExc_RuntimeError, "Device not started."); - Py_RETURN_NONE; - } - - int fd = (*self->fd)[devarg]; - - std::map::iterator it2 = self->buffers->find(devarg); - if(it2 != self->buffers->end()) - { - struct buffer *buffers = (*self->buffers)[devarg]; - int buffer_count = (*self->buffer_counts)[devarg]; - - for(int i = 0; i < buffer_count; i++) - { - v4l2_munmap(buffers[i].start, buffers[i].length); - } - delete [] buffers; + class Device_manager_Worker_thread_args *threadArgs = (*self->threadArgStore)[devarg]; + threadArgs->CloseDevice(); - //Release memory - self->buffers->erase(devarg); - self->buffer_counts->erase(devarg); - } + //Stop worker thread + threadArgs->Stop(); - //Release memory - v4l2_close(fd); - fd = -1; - self->fd->erase(devarg); + //Release memeory + threadArgs->WaitForStop(); + delete threadArgs; + self->threadArgStore->erase(devarg); Py_RETURN_NONE; } From ac7f2aa94b5aff75abbc2719806900e3fddb6fa5 Mon Sep 17 00:00:00 2001 From: Tim Sheerman-Chase Date: Sun, 20 Oct 2013 08:19:53 +0100 Subject: [PATCH 026/256] Tidy code but still seg faulting --- v4l2capture.cpp | 73 +++++++++++++++++++++++-------------------------- 1 file changed, 34 insertions(+), 39 deletions(-) diff --git a/v4l2capture.cpp b/v4l2capture.cpp index a466a7e..39fc1a8 100644 --- a/v4l2capture.cpp +++ b/v4l2capture.cpp @@ -182,16 +182,16 @@ static PyObject *Video_device_get_info(Video_device *self) struct v4l2_capability caps; if(my_ioctl(self->fd, VIDIOC_QUERYCAP, &caps)) - { - Py_RETURN_NONE; - } + { + Py_RETURN_NONE; + } PyObject *set = PySet_New(NULL); if(!set) - 
{ - Py_RETURN_NONE; - } + { + Py_RETURN_NONE; + } struct capability *capability = capabilities; @@ -870,17 +870,15 @@ class Device_manager_Worker_thread_args if(this->buffers == NULL) { throw std::runtime_error("Buffers have not been created"); - return 0; } struct v4l2_buffer buffer; buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; buffer.memory = V4L2_MEMORY_MMAP; - printf("a %d\n", this->fd); + printf("a %d %ld\n", this->fd, (long) &buffer); if(my_ioctl(fd, VIDIOC_DQBUF, &buffer)) { throw std::runtime_error("VIDIOC_DQBUF failed"); - return 0; } printf("b\n"); #ifdef USE_LIBV4L @@ -896,7 +894,6 @@ class Device_manager_Worker_thread_args if(!result) { throw std::runtime_error("String convert failed"); - return 0; } char *rgb = PyString_AS_STRING(result); @@ -941,7 +938,6 @@ class Device_manager_Worker_thread_args if(my_ioctl(fd, VIDIOC_QBUF, &buffer)) { throw std::runtime_error("VIDIOC_QBUF failed"); - return 0; } return 1; @@ -955,8 +951,7 @@ class Device_manager_Worker_thread_args if(fd < 0) { - PyErr_SetFromErrnoWithFilename(PyExc_IOError, this->devName.c_str()); - return 0; + throw std::runtime_error("Error opening device"); } this->deviceStarted = 0; @@ -969,8 +964,7 @@ class Device_manager_Worker_thread_args //Check this device has not already been start if(this->fd==-1) { - PyErr_Format(PyExc_RuntimeError, "Device not open."); - return 0; + throw std::runtime_error("Device not open"); } //Set other parameters for capture @@ -1000,22 +994,19 @@ class Device_manager_Worker_thread_args if(my_ioctl(this->fd, VIDIOC_REQBUFS, &reqbuf)) { - PyErr_SetString(PyExc_IOError, "VIDIOC_REQBUFS failed"); - return 0; + throw std::runtime_error("VIDIOC_REQBUFS failed"); } if(!reqbuf.count) { - PyErr_SetString(PyExc_IOError, "Not enough buffer memory"); - return 0; + throw std::runtime_error("Not enough buffer memory"); } this->buffers = new struct buffer [reqbuf.count]; if(this->buffers == NULL) { - PyErr_NoMemory(); - return 0; + throw std::runtime_error("Failed to allocate buffer memory"); } for(unsigned int i = 0; i < reqbuf.count; i++) @@ -1027,7 +1018,7 @@ class Device_manager_Worker_thread_args if(my_ioctl(fd, VIDIOC_QUERYBUF, &buffer)) { - return 0; + throw std::runtime_error("VIDIOC_QUERYBUF failed"); } this->buffers[i].length = buffer.length; @@ -1036,8 +1027,7 @@ class Device_manager_Worker_thread_args if(this->buffers[i].start == MAP_FAILED) { - PyErr_SetFromErrno(PyExc_IOError); - return 0; + throw std::runtime_error("v4l2_mmap failed"); } } @@ -1055,7 +1045,7 @@ class Device_manager_Worker_thread_args if(my_ioctl(fd, VIDIOC_QBUF, &buffer)) { - return 0; + throw std::runtime_error("VIDIOC_QBUF failed"); } } @@ -1065,7 +1055,7 @@ class Device_manager_Worker_thread_args if(my_ioctl(fd, VIDIOC_STREAMON, &type)) { - return 0; + throw std::runtime_error("VIDIOC_STREAMON failed"); } this->deviceStarted = 1; @@ -1076,8 +1066,7 @@ class Device_manager_Worker_thread_args { if(this->fd==-1) { - PyErr_Format(PyExc_RuntimeError, "Device not started."); - return; + throw std::runtime_error("Device not started"); } //Signal V4l2 api @@ -1086,18 +1075,22 @@ class Device_manager_Worker_thread_args if(my_ioctl(this->fd, VIDIOC_STREAMOFF, &type)) { - return; + throw std::runtime_error("VIDIOC_STREAMOFF failed"); } + + this->deviceStarted = 0; } int CloseDeviceInternal() { if(this->fd == -1) { - PyErr_Format(PyExc_RuntimeError, "Device not started."); - return 0; + throw std::runtime_error("Device not open"); } + if(this->deviceStarted) + StopDeviceInternal(); + for(int i = 0; i < this->buffer_counts; i++) { 
v4l2_munmap(buffers[i].start, buffers[i].length); @@ -1113,23 +1106,19 @@ class Device_manager_Worker_thread_args void Run() { - printf("Thread started\n"); + printf("Thread started: %s\n", this->devName.c_str()); int running = 1; pthread_mutex_lock(&this->lock); this->stopped = 0; pthread_mutex_unlock(&this->lock); + try + { while(running) { usleep(1000); - try - { - if(deviceStarted) this->ReadFrame(); - } - catch(std::exception) - { - } + if(deviceStarted) this->ReadFrame(); pthread_mutex_lock(&this->lock); if(this->startDeviceFlag.size() > 0) @@ -1169,6 +1158,12 @@ class Device_manager_Worker_thread_args running = !this->stop; pthread_mutex_unlock(&this->lock); } + } + catch(std::exception) + { + + } + printf("Thread stopping\n"); pthread_mutex_lock(&this->lock); this->stopped = 1; From 39279982d25b1426f089e019401c5d69ae33d2c0 Mon Sep 17 00:00:00 2001 From: Tim Sheerman-Chase Date: Sun, 20 Oct 2013 08:39:08 +0100 Subject: [PATCH 027/256] Still not found bug --- v4l2capture.cpp | 106 +++++++++++++++++++++++++++--------------------- 1 file changed, 59 insertions(+), 47 deletions(-) diff --git a/v4l2capture.cpp b/v4l2capture.cpp index 39fc1a8..812650d 100644 --- a/v4l2capture.cpp +++ b/v4l2capture.cpp @@ -867,77 +867,86 @@ class Device_manager_Worker_thread_args int ReadFrame() { + printf("ReadFrame\n"); + if(this->fd<0) + throw std::runtime_error("File not open"); + if(this->buffers == NULL) - { throw std::runtime_error("Buffers have not been created"); - } struct v4l2_buffer buffer; buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; buffer.memory = V4L2_MEMORY_MMAP; - printf("a %d %ld\n", this->fd, (long) &buffer); - if(my_ioctl(fd, VIDIOC_DQBUF, &buffer)) + + if(my_ioctl(this->fd, VIDIOC_DQBUF, &buffer)) { throw std::runtime_error("VIDIOC_DQBUF failed"); } - printf("b\n"); - #ifdef USE_LIBV4L - printf("Rx %d\n", buffer.bytesused); //self->buffers[buffer.index].start, buffer.bytesused + #ifdef USE_LIBV4L + printf("rx %d\n", buffer.bytesused); + //PyObject *result = PyString_FromStringAndSize( + // (const char*)self->buffers[buffer.index].start, buffer.bytesused); + + //if(!result) + // { + // Py_RETURN_NONE; + // } #else - // Convert buffer from YUYV to RGB. - // For the byte order, see: http://v4l2spec.bytesex.org/spec/r4339.htm - // For the color conversion, see: http://v4l2spec.bytesex.org/spec/x2123.htm - int length = buffer.bytesused * 6 / 4; - PyObject *result = PyString_FromStringAndSize(NULL, length); + // Convert buffer from YUYV to RGB. + // For the byte order, see: http://v4l2spec.bytesex.org/spec/r4339.htm + // For the color conversion, see: http://v4l2spec.bytesex.org/spec/x2123.htm + int length = buffer.bytesused * 6 / 4; + PyObject *result = PyString_FromStringAndSize(NULL, length); - if(!result) - { - throw std::runtime_error("String convert failed"); - } + if(!result) + { + Py_RETURN_NONE; + } - char *rgb = PyString_AS_STRING(result); - char *rgb_max = rgb + length; - unsigned char *yuyv = self->buffers[buffer.index].start; + char *rgb = PyString_AS_STRING(result); + char *rgb_max = rgb + length; + unsigned char *yuyv = self->buffers[buffer.index].start; #define CLAMP(c) ((c) <= 0 ? 0 : (c) >= 65025 ? 
255 : (c) >> 8) - while(rgb < rgb_max) - { - int u = yuyv[1] - 128; - int v = yuyv[3] - 128; - int uv = 100 * u + 208 * v; - u *= 516; - v *= 409; - - int y = 298 * (yuyv[0] - 16); - rgb[0] = CLAMP(y + v); - rgb[1] = CLAMP(y - uv); - rgb[2] = CLAMP(y + u); - - y = 298 * (yuyv[2] - 16); - rgb[3] = CLAMP(y + v); - rgb[4] = CLAMP(y - uv); - rgb[5] = CLAMP(y + u); - - rgb += 6; - yuyv += 4; - } + while(rgb < rgb_max) + { + int u = yuyv[1] - 128; + int v = yuyv[3] - 128; + int uv = 100 * u + 208 * v; + u *= 516; + v *= 409; + + int y = 298 * (yuyv[0] - 16); + rgb[0] = CLAMP(y + v); + rgb[1] = CLAMP(y - uv); + rgb[2] = CLAMP(y + u); + + y = 298 * (yuyv[2] - 16); + rgb[3] = CLAMP(y + v); + rgb[4] = CLAMP(y - uv); + rgb[5] = CLAMP(y + u); + + rgb += 6; + yuyv += 4; + } #undef CLAMP #endif - /*if(1) + //PyObject *out = result; + + if(1) { - out = PyTuple_New(4); + /*out = PyTuple_New(4); PyTuple_SetItem(out, 0, result); PyTuple_SetItem(out, 1, PyInt_FromLong(buffer.timestamp.tv_sec)); PyTuple_SetItem(out, 2, PyInt_FromLong(buffer.timestamp.tv_usec)); - PyTuple_SetItem(out, 3, PyInt_FromLong(buffer.sequence)); - }*/ + PyTuple_SetItem(out, 3, PyInt_FromLong(buffer.sequence));*/ + } - //Queue next frame read - if(my_ioctl(fd, VIDIOC_QBUF, &buffer)) + if(my_ioctl(this->fd, VIDIOC_QBUF, &buffer)) { - throw std::runtime_error("VIDIOC_QBUF failed"); + //Py_RETURN_NONE; } return 1; @@ -1059,11 +1068,13 @@ class Device_manager_Worker_thread_args } this->deviceStarted = 1; + printf("Started ok\n"); return 1; } void StopDeviceInternal() { + printf("StopDeviceInternal\n"); if(this->fd==-1) { throw std::runtime_error("Device not started"); @@ -1083,6 +1094,7 @@ class Device_manager_Worker_thread_args int CloseDeviceInternal() { + printf("CloseDeviceInternal\n"); if(this->fd == -1) { throw std::runtime_error("Device not open"); From 8bbb2b4d31a19acd62a33810f4d1c669c9052091 Mon Sep 17 00:00:00 2001 From: Tim Sheerman-Chase Date: Sun, 20 Oct 2013 10:39:20 +0100 Subject: [PATCH 028/256] Applied fix to main problem but now swamped by warnings --- v4l2capture.cpp | 21 ++++++++++----------- 1 file changed, 10 insertions(+), 11 deletions(-) diff --git a/v4l2capture.cpp b/v4l2capture.cpp index 812650d..1a680f1 100644 --- a/v4l2capture.cpp +++ b/v4l2capture.cpp @@ -88,20 +88,19 @@ static int my_ioctl(int fd, int request, void *arg) // Retry ioctl until it returns without being interrupted. for(;;) - { - int result = v4l2_ioctl(fd, request, arg); - - if(!result) { - return 0; - } + int result = v4l2_ioctl(fd, request, arg); - if(errno != EINTR) - { - PyErr_SetFromErrno(PyExc_IOError); - return 1; - } + if(!result) + return 0; + + if(errno != EINTR && errno != EAGAIN) + { + PyErr_SetFromErrno(PyExc_IOError); + return 1; } + usleep(1000); + } } static void Video_device_unmap(Video_device *self) From c98659a7d4da113fcfd1bb341c1c8fa2e76eb5e7 Mon Sep 17 00:00:00 2001 From: Tim Sheerman-Chase Date: Sun, 20 Oct 2013 11:34:08 +0100 Subject: [PATCH 029/256] Use select to check webcam ready --- v4l2capture.cpp | 42 +++++++++++++++++++++++++++++++++++------- 1 file changed, 35 insertions(+), 7 deletions(-) diff --git a/v4l2capture.cpp b/v4l2capture.cpp index 1a680f1..70f55c2 100644 --- a/v4l2capture.cpp +++ b/v4l2capture.cpp @@ -83,18 +83,46 @@ static struct capability capabilities[] = { { V4L2_CAP_VIDEO_OVERLAY, "video_overlay" } }; -static int my_ioctl(int fd, int request, void *arg) +static int my_ioctl(int fd, int request, void *arg, int utimeout = -1) { // Retry ioctl until it returns without being interrupted. 
for(;;) { + // Wait for frame until time out + if(utimeout >= 0) + { + + fd_set fds; + FD_ZERO (&fds); + FD_SET (fd, &fds); + + struct timeval tv; + tv.tv_sec = 0; + tv.tv_usec = utimeout; + int r = select(fd+1, &fds, NULL, NULL, &tv); + + if(r == 0) + return 1; //Timed out + } + + //printf("call\n"); int result = v4l2_ioctl(fd, request, arg); + //printf("%d\n", result); if(!result) + { + //printf("ret\n"); return 0; + } - if(errno != EINTR && errno != EAGAIN) + if(errno == EAGAIN) + { + //printf("ret\n"); + return 1; + } + + if(errno != EINTR) { PyErr_SetFromErrno(PyExc_IOError); return 1; @@ -866,7 +894,6 @@ class Device_manager_Worker_thread_args int ReadFrame() { - printf("ReadFrame\n"); if(this->fd<0) throw std::runtime_error("File not open"); @@ -877,9 +904,9 @@ class Device_manager_Worker_thread_args buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; buffer.memory = V4L2_MEMORY_MMAP; - if(my_ioctl(this->fd, VIDIOC_DQBUF, &buffer)) + if(my_ioctl(this->fd, VIDIOC_DQBUF, &buffer, 10000)) { - throw std::runtime_error("VIDIOC_DQBUF failed"); + return 0; } #ifdef USE_LIBV4L @@ -1127,6 +1154,7 @@ class Device_manager_Worker_thread_args { while(running) { + //printf("Sleep\n"); usleep(1000); if(deviceStarted) this->ReadFrame(); @@ -1170,9 +1198,9 @@ class Device_manager_Worker_thread_args pthread_mutex_unlock(&this->lock); } } - catch(std::exception) + catch(std::exception &err) { - + printf("An exception has occured: %s\n", err.what()); } printf("Thread stopping\n"); From 72cb6c82ea17876b2b9c497d0643afae8cc86963 Mon Sep 17 00:00:00 2001 From: Tim Sheerman-Chase Date: Sun, 20 Oct 2013 20:22:33 +0100 Subject: [PATCH 030/256] Broke something, segfaults --- v4l2capture.cpp | 75 +++++++++++++++++++++++++++++++++++++++++++------ 1 file changed, 66 insertions(+), 9 deletions(-) diff --git a/v4l2capture.cpp b/v4l2capture.cpp index 70f55c2..295ee22 100644 --- a/v4l2capture.cpp +++ b/v4l2capture.cpp @@ -805,7 +805,7 @@ class Device_manager_Worker_thread_args int stopped; pthread_mutex_t lock; std::vector openDeviceFlag; - std::vector startDeviceFlag; + std::vector startDeviceFlag; int stopDeviceFlag; int closeDeviceFlag; int deviceStarted; @@ -871,10 +871,10 @@ class Device_manager_Worker_thread_args pthread_mutex_unlock(&this->lock); }; - void StartDevice() + void StartDevice(int buffer_count) { pthread_mutex_lock(&this->lock); - this->startDeviceFlag.push_back(this->devName.c_str()); + this->startDeviceFlag.push_back(buffer_count); pthread_mutex_unlock(&this->lock); }; @@ -993,7 +993,35 @@ class Device_manager_Worker_thread_args return 1; } - int StartDeviceInternal() + int SetFormat(int size_x, int size_y, const char *fmt) + { + struct v4l2_format format; + format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + format.fmt.pix.width = size_x; + format.fmt.pix.height = size_y; + #ifdef USE_LIBV4L + format.fmt.pix.pixelformat = V4L2_PIX_FMT_RGB24; + if(fmt != NULL && strcmp(fmt, "MJPEG")==0) + format.fmt.pix.pixelformat = V4L2_PIX_FMT_MJPEG; + if(fmt != NULL && strcmp(fmt, "RGB24")==0) + format.fmt.pix.pixelformat = V4L2_PIX_FMT_RGB24; + if(fmt != NULL && strcmp(fmt, "YUV420")==0) + format.fmt.pix.pixelformat = V4L2_PIX_FMT_YUV420; + #else + format.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV; + #endif + format.fmt.pix.field = V4L2_FIELD_NONE; + format.fmt.pix.bytesperline = 0; + + if(my_ioctl(this->fd, VIDIOC_S_FMT, &format)) + { + return 0; + } + + return 1; + } + + int StartDeviceInternal(int buffer_count = 10) { printf("StartDeviceInternal\n"); //Check this device has not already been start @@ -1021,7 +1049,6 @@ class 
Device_manager_Worker_thread_args // calling 'start' if v4l2capture is compiled with libv4l2. Otherwise // raises IOError. - int buffer_count = 10; struct v4l2_requestbuffers reqbuf; reqbuf.count = buffer_count; reqbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; @@ -1162,9 +1189,9 @@ class Device_manager_Worker_thread_args pthread_mutex_lock(&this->lock); if(this->startDeviceFlag.size() > 0) { - std::string devName = this->startDeviceFlag[this->startDeviceFlag.size()-1]; + int buffer_count = this->startDeviceFlag[this->startDeviceFlag.size()-1]; this->startDeviceFlag.pop_back(); - this->StartDeviceInternal(); + this->StartDeviceInternal(buffer_count); } pthread_mutex_unlock(&this->lock); @@ -1272,8 +1299,32 @@ static PyObject *Device_manager_open(Device_manager *self, PyObject *args) Py_RETURN_NONE; } + +static PyObject *Device_manager_set_format(Device_manager *self, PyObject *args) +{ + int size_x; + int size_y; + const char *fmt = NULL; + + if(!PyArg_ParseTuple(args, "ii|s", &size_x, &size_y, &fmt)) + { + Py_RETURN_NONE; + } + + const char *devarg = "/dev/video0"; + if(PyTuple_Size(args) >= 1) + { + PyObject *pydevarg = PyTuple_GetItem(args, 0); + devarg = PyString_AsString(pydevarg); + } + + class Device_manager_Worker_thread_args *threadArgs = (*self->threadArgStore)[devarg]; + int ret = threadArgs->SetFormat(size_x, size_y, fmt); + + return Py_BuildValue("i", ret); +} + static PyObject *Device_manager_Start(Device_manager *self, PyObject *args) -// self, dev = None, reqSize=(640, 480), reqFps = 30, fmt = "MJPEG", buffer_count = 10): { //Process arguments @@ -1292,7 +1343,7 @@ static PyObject *Device_manager_Start(Device_manager *self, PyObject *args) } class Device_manager_Worker_thread_args *threadArgs = (*self->threadArgStore)[devarg]; - threadArgs->StartDevice(); + threadArgs->StartDevice(buffer_count); Py_RETURN_NONE; } @@ -1409,6 +1460,11 @@ static PyMethodDef Device_manager_methods[] = { {"open", (PyCFunction)Device_manager_open, METH_VARARGS, "open(dev = '\\dev\\video0')\n\n" "Open video capture."}, + {"set_format", (PyCFunction)Device_manager_set_format, METH_VARARGS, + "set_format(size_x, size_y, pixel_format='RGB24') -> size_x, size_y\n\n" + "Request the video device to set image size and format. The device may " + "choose another size than requested and will return its choice. The " + "pixel format may be either RGB24, YUV420 or MJPEG."}, {"start", (PyCFunction)Device_manager_Start, METH_VARARGS, "start(dev = '\\dev\\video0', reqSize=(640, 480), reqFps = 30, fmt = 'MJPEG\', buffer_count = 10)\n\n" "Start video capture."}, @@ -1418,6 +1474,7 @@ static PyMethodDef Device_manager_methods[] = { {"close", (PyCFunction)Device_manager_close, METH_VARARGS, "close(dev = '\\dev\\video0')\n\n" "Close video device. 
Subsequent calls to other methods will fail."}, + {NULL} }; From f2fa97ece668d30ee51b38525808e1ac51480947 Mon Sep 17 00:00:00 2001 From: Tim Sheerman-Chase Date: Sun, 20 Oct 2013 20:31:51 +0100 Subject: [PATCH 031/256] Set format works but remove sleeps cause segfault --- v4l2capture.cpp | 13 +++++-------- 1 file changed, 5 insertions(+), 8 deletions(-) diff --git a/v4l2capture.cpp b/v4l2capture.cpp index 295ee22..768f29d 100644 --- a/v4l2capture.cpp +++ b/v4l2capture.cpp @@ -995,6 +995,7 @@ class Device_manager_Worker_thread_args int SetFormat(int size_x, int size_y, const char *fmt) { + pthread_mutex_lock(&this->lock); struct v4l2_format format; format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; format.fmt.pix.width = size_x; @@ -1015,9 +1016,11 @@ class Device_manager_Worker_thread_args if(my_ioctl(this->fd, VIDIOC_S_FMT, &format)) { + pthread_mutex_unlock(&this->lock); return 0; } + pthread_mutex_unlock(&this->lock); return 1; } @@ -1305,19 +1308,13 @@ static PyObject *Device_manager_set_format(Device_manager *self, PyObject *args) int size_x; int size_y; const char *fmt = NULL; + const char *devarg = NULL; - if(!PyArg_ParseTuple(args, "ii|s", &size_x, &size_y, &fmt)) + if(!PyArg_ParseTuple(args, "sii|s", &devarg, &size_x, &size_y, &fmt)) { Py_RETURN_NONE; } - const char *devarg = "/dev/video0"; - if(PyTuple_Size(args) >= 1) - { - PyObject *pydevarg = PyTuple_GetItem(args, 0); - devarg = PyString_AsString(pydevarg); - } - class Device_manager_Worker_thread_args *threadArgs = (*self->threadArgStore)[devarg]; int ret = threadArgs->SetFormat(size_x, size_y, fmt); From a85cf9ee6db2cb5f9b9339477acb165f2131c95c Mon Sep 17 00:00:00 2001 From: Tim Sheerman-Chase Date: Sun, 20 Oct 2013 20:33:01 +0100 Subject: [PATCH 032/256] Update doc strings --- v4l2capture.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/v4l2capture.cpp b/v4l2capture.cpp index 768f29d..a88e91f 100644 --- a/v4l2capture.cpp +++ b/v4l2capture.cpp @@ -1458,7 +1458,7 @@ static PyMethodDef Device_manager_methods[] = { "open(dev = '\\dev\\video0')\n\n" "Open video capture."}, {"set_format", (PyCFunction)Device_manager_set_format, METH_VARARGS, - "set_format(size_x, size_y, pixel_format='RGB24') -> size_x, size_y\n\n" + "set_format(dev, size_x, size_y, pixel_format='RGB24') -> size_x, size_y\n\n" "Request the video device to set image size and format. The device may " "choose another size than requested and will return its choice. 
The " "pixel format may be either RGB24, YUV420 or MJPEG."}, From 9637f12d418e27460fdf305dc766b58b5d29c455 Mon Sep 17 00:00:00 2001 From: Tim Sheerman-Chase Date: Sun, 20 Oct 2013 22:12:08 +0100 Subject: [PATCH 033/256] Control order of actions in worker thread --- v4l2capture.cpp | 25 +++++++++++++++---------- 1 file changed, 15 insertions(+), 10 deletions(-) diff --git a/v4l2capture.cpp b/v4l2capture.cpp index a88e91f..1eaacf6 100644 --- a/v4l2capture.cpp +++ b/v4l2capture.cpp @@ -990,6 +990,7 @@ class Device_manager_Worker_thread_args } this->deviceStarted = 0; + printf("Done opening\n"); return 1; } @@ -1190,25 +1191,27 @@ class Device_manager_Worker_thread_args if(deviceStarted) this->ReadFrame(); pthread_mutex_lock(&this->lock); - if(this->startDeviceFlag.size() > 0) + if(this->openDeviceFlag.size() > 0) { - int buffer_count = this->startDeviceFlag[this->startDeviceFlag.size()-1]; - this->startDeviceFlag.pop_back(); - this->StartDeviceInternal(buffer_count); + std::string devName = this->openDeviceFlag[this->openDeviceFlag.size()-1]; + this->openDeviceFlag.pop_back(); + this->OpenDeviceInternal(); } pthread_mutex_unlock(&this->lock); pthread_mutex_lock(&this->lock); - if(this->openDeviceFlag.size() > 0) + if(this->startDeviceFlag.size() > 0 && this->openDeviceFlag.size() == 0) { - std::string devName = this->openDeviceFlag[this->openDeviceFlag.size()-1]; - this->openDeviceFlag.pop_back(); - this->OpenDeviceInternal(); + int buffer_count = this->startDeviceFlag[this->startDeviceFlag.size()-1]; + this->startDeviceFlag.pop_back(); + this->StartDeviceInternal(buffer_count); } pthread_mutex_unlock(&this->lock); pthread_mutex_lock(&this->lock); - if(this->stopDeviceFlag) + if(this->stopDeviceFlag + && this->openDeviceFlag.size() == 0 + && this->startDeviceFlag.size() == 0) { this->StopDeviceInternal(); this->stopDeviceFlag = 0; @@ -1216,7 +1219,9 @@ class Device_manager_Worker_thread_args pthread_mutex_unlock(&this->lock); pthread_mutex_lock(&this->lock); - if(this->closeDeviceFlag) + if(this->closeDeviceFlag && !this->stopDeviceFlag + && this->openDeviceFlag.size() == 0 + && this->startDeviceFlag.size() == 0) { this->CloseDeviceInternal(); this->closeDeviceFlag = 0; From e3ddbbbacd355586c98ed028ecbf7e639c61dc5e Mon Sep 17 00:00:00 2001 From: Tim Sheerman-Chase Date: Mon, 21 Oct 2013 15:01:37 +0100 Subject: [PATCH 034/256] Move frame decode to separate func --- v4l2capture.cpp | 62 +++++++------------------------------------------ 1 file changed, 9 insertions(+), 53 deletions(-) diff --git a/v4l2capture.cpp b/v4l2capture.cpp index 1eaacf6..286ad76 100644 --- a/v4l2capture.cpp +++ b/v4l2capture.cpp @@ -812,6 +812,7 @@ class Device_manager_Worker_thread_args int fd; struct buffer *buffers; int buffer_counts; + std::string pxFmt; Device_manager_Worker_thread_args(const char *devNameIn) { @@ -909,55 +910,7 @@ class Device_manager_Worker_thread_args return 0; } - #ifdef USE_LIBV4L - printf("rx %d\n", buffer.bytesused); - //PyObject *result = PyString_FromStringAndSize( - // (const char*)self->buffers[buffer.index].start, buffer.bytesused); - - //if(!result) - // { - // Py_RETURN_NONE; - // } - #else - // Convert buffer from YUYV to RGB. 
- // For the byte order, see: http://v4l2spec.bytesex.org/spec/r4339.htm - // For the color conversion, see: http://v4l2spec.bytesex.org/spec/x2123.htm - int length = buffer.bytesused * 6 / 4; - PyObject *result = PyString_FromStringAndSize(NULL, length); - - if(!result) - { - Py_RETURN_NONE; - } - - char *rgb = PyString_AS_STRING(result); - char *rgb_max = rgb + length; - unsigned char *yuyv = self->buffers[buffer.index].start; - - #define CLAMP(c) ((c) <= 0 ? 0 : (c) >= 65025 ? 255 : (c) >> 8) - while(rgb < rgb_max) - { - int u = yuyv[1] - 128; - int v = yuyv[3] - 128; - int uv = 100 * u + 208 * v; - u *= 516; - v *= 409; - - int y = 298 * (yuyv[0] - 16); - rgb[0] = CLAMP(y + v); - rgb[1] = CLAMP(y - uv); - rgb[2] = CLAMP(y + u); - - y = 298 * (yuyv[2] - 16); - rgb[3] = CLAMP(y + v); - rgb[4] = CLAMP(y - uv); - rgb[5] = CLAMP(y + u); - - rgb += 6; - yuyv += 4; - } - #undef CLAMP - #endif + this->DecodeFrame((const unsigned char*)this->buffers[buffer.index].start, buffer.bytesused); //PyObject *out = result; @@ -978,6 +931,11 @@ class Device_manager_Worker_thread_args return 1; } + int DecodeFrame(const unsigned char *data, unsigned dataLen) + { + printf("rx %d\n", dataLen); + } + int OpenDeviceInternal() { printf("OpenDeviceInternal\n"); @@ -1021,6 +979,8 @@ class Device_manager_Worker_thread_args return 0; } + //Store pixel format for decoding usage later + this->pxFmt = fmt; pthread_mutex_unlock(&this->lock); return 1; } @@ -1038,10 +998,6 @@ class Device_manager_Worker_thread_args //TODO /* - //Suggest an image size to the device. The device may choose and - //return another size if it doesn't support the suggested one. - self.video.set_format(reqSize[0], reqSize[1], fmt) - //Query current pixel format self.size_x, self.size_y, self.pixelFmt = self.video.get_format() From 44cdbe5038f6e239275179eeed95a2767b71bbac Mon Sep 17 00:00:00 2001 From: Tim Sheerman-Chase Date: Mon, 21 Oct 2013 15:06:32 +0100 Subject: [PATCH 035/256] Move frame decode to separate func --- v4l2capture.cpp | 38 ++++++++++++++++++++++++++++++++++++++ 1 file changed, 38 insertions(+) diff --git a/v4l2capture.cpp b/v4l2capture.cpp index 286ad76..e4cc083 100644 --- a/v4l2capture.cpp +++ b/v4l2capture.cpp @@ -796,6 +796,34 @@ static PyObject *InsertHuffmanTable(PyObject *self, PyObject *args) // ********************************************************************* +class SetFormatParams +{ +public: + std::string fmt; + int width, height; + + SetFormatParams() + { + width = 0; + height = 0; + } + + SetFormatParams(const class SetFormatParams &in) + { + SetFormatParams::operator=(in); + } + + class SetFormatParams &SetFormatParams=(const class SetFormatParams &in) + { + width = in.width; + height = in.height; + fmt = in.fmt + return *this; + } + +}; + + class Device_manager_Worker_thread_args { public: @@ -872,6 +900,16 @@ class Device_manager_Worker_thread_args pthread_mutex_unlock(&this->lock); }; + void SetFormat() + { + class SetFormatParams params; + + + pthread_mutex_lock(&this->lock); + //this->openDeviceFlag.push_back(this->devName.c_str()); + pthread_mutex_unlock(&this->lock); + } + void StartDevice(int buffer_count) { pthread_mutex_lock(&this->lock); From 14791b7f58b9a87837676ce2d4b8d1029ebc747c Mon Sep 17 00:00:00 2001 From: Tim Sheerman-Chase Date: Mon, 21 Oct 2013 15:27:17 +0100 Subject: [PATCH 036/256] Change set format to be async --- v4l2capture.cpp | 72 ++++++++++++++++++++++++++++++++----------------- 1 file changed, 47 insertions(+), 25 deletions(-) diff --git a/v4l2capture.cpp b/v4l2capture.cpp index 
e4cc083..87071c8 100644 --- a/v4l2capture.cpp +++ b/v4l2capture.cpp @@ -808,19 +808,18 @@ class SetFormatParams height = 0; } - SetFormatParams(const class SetFormatParams &in) + SetFormatParams(const SetFormatParams &in) { SetFormatParams::operator=(in); } - class SetFormatParams &SetFormatParams=(const class SetFormatParams &in) + const SetFormatParams &operator=(const SetFormatParams &in) { width = in.width; height = in.height; - fmt = in.fmt + fmt = in.fmt; return *this; - } - + } }; @@ -834,6 +833,7 @@ class Device_manager_Worker_thread_args pthread_mutex_t lock; std::vector openDeviceFlag; std::vector startDeviceFlag; + std::vector setFormatFlags; int stopDeviceFlag; int closeDeviceFlag; int deviceStarted; @@ -900,13 +900,15 @@ class Device_manager_Worker_thread_args pthread_mutex_unlock(&this->lock); }; - void SetFormat() + void SetFormat(const char *fmt, int width, int height) { class SetFormatParams params; - + params.fmt = fmt; + params.width = width; + params.height = height; pthread_mutex_lock(&this->lock); - //this->openDeviceFlag.push_back(this->devName.c_str()); + this->setFormatFlags.push_back(params); pthread_mutex_unlock(&this->lock); } @@ -931,6 +933,7 @@ class Device_manager_Worker_thread_args pthread_mutex_unlock(&this->lock); }; +protected: int ReadFrame() { if(this->fd<0) @@ -972,6 +975,7 @@ class Device_manager_Worker_thread_args int DecodeFrame(const unsigned char *data, unsigned dataLen) { printf("rx %d\n", dataLen); + return 1; } int OpenDeviceInternal() @@ -990,24 +994,26 @@ class Device_manager_Worker_thread_args return 1; } - int SetFormat(int size_x, int size_y, const char *fmt) + int SetFormatInternal(class SetFormatParams &args) { + //int size_x, int size_y, const char *fmt; + pthread_mutex_lock(&this->lock); struct v4l2_format format; format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; - format.fmt.pix.width = size_x; - format.fmt.pix.height = size_y; - #ifdef USE_LIBV4L + format.fmt.pix.width = args.width; + format.fmt.pix.height = args.height; format.fmt.pix.pixelformat = V4L2_PIX_FMT_RGB24; - if(fmt != NULL && strcmp(fmt, "MJPEG")==0) + + if(strcmp(args.fmt.c_str(), "MJPEG")==0) format.fmt.pix.pixelformat = V4L2_PIX_FMT_MJPEG; - if(fmt != NULL && strcmp(fmt, "RGB24")==0) + if(strcmp(args.fmt.c_str(), "RGB24")==0) format.fmt.pix.pixelformat = V4L2_PIX_FMT_RGB24; - if(fmt != NULL && strcmp(fmt, "YUV420")==0) + if(strcmp(args.fmt.c_str(), "YUV420")==0) format.fmt.pix.pixelformat = V4L2_PIX_FMT_YUV420; - #else - format.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV; - #endif + //if(strcmp(args.fmt.c_str(), "YUVV")==0) + // format.fmt.pix.pixelformat = V4L2_PIX_FMT_YUVV; + format.fmt.pix.field = V4L2_FIELD_NONE; format.fmt.pix.bytesperline = 0; @@ -1018,7 +1024,7 @@ class Device_manager_Worker_thread_args } //Store pixel format for decoding usage later - this->pxFmt = fmt; + this->pxFmt = args.fmt; pthread_mutex_unlock(&this->lock); return 1; } @@ -1167,6 +1173,7 @@ class Device_manager_Worker_thread_args return 1; } +public: void Run() { printf("Thread started: %s\n", this->devName.c_str()); @@ -1192,9 +1199,21 @@ class Device_manager_Worker_thread_args this->OpenDeviceInternal(); } pthread_mutex_unlock(&this->lock); + + pthread_mutex_lock(&this->lock); + if(this->setFormatFlags.size() > 0 + && this->openDeviceFlag.size() == 0) + { + class SetFormatParams params = this->setFormatFlags[this->setFormatFlags.size()-1]; + this->setFormatFlags.pop_back(); + this->SetFormatInternal(params); + } + pthread_mutex_unlock(&this->lock); pthread_mutex_lock(&this->lock); - 
if(this->startDeviceFlag.size() > 0 && this->openDeviceFlag.size() == 0) + if(this->startDeviceFlag.size() > 0 + && this->openDeviceFlag.size() == 0 + && this->setFormatFlags.size() == 0) { int buffer_count = this->startDeviceFlag[this->startDeviceFlag.size()-1]; this->startDeviceFlag.pop_back(); @@ -1204,7 +1223,8 @@ class Device_manager_Worker_thread_args pthread_mutex_lock(&this->lock); if(this->stopDeviceFlag - && this->openDeviceFlag.size() == 0 + && this->openDeviceFlag.size() == 0 + && this->setFormatFlags.size() == 0 && this->startDeviceFlag.size() == 0) { this->StopDeviceInternal(); @@ -1213,9 +1233,11 @@ class Device_manager_Worker_thread_args pthread_mutex_unlock(&this->lock); pthread_mutex_lock(&this->lock); - if(this->closeDeviceFlag && !this->stopDeviceFlag + if(this->closeDeviceFlag && this->openDeviceFlag.size() == 0 - && this->startDeviceFlag.size() == 0) + && this->setFormatFlags.size() == 0 + && this->startDeviceFlag.size() == 0 + && !this->stopDeviceFlag) { this->CloseDeviceInternal(); this->closeDeviceFlag = 0; @@ -1315,9 +1337,9 @@ static PyObject *Device_manager_set_format(Device_manager *self, PyObject *args) } class Device_manager_Worker_thread_args *threadArgs = (*self->threadArgStore)[devarg]; - int ret = threadArgs->SetFormat(size_x, size_y, fmt); + threadArgs->SetFormat(fmt, size_x, size_y); - return Py_BuildValue("i", ret); + Py_RETURN_NONE; } static PyObject *Device_manager_Start(Device_manager *self, PyObject *args) From 422464dd5a617c81cc47f4c50261b0e244b9546d Mon Sep 17 00:00:00 2001 From: Tim Sheerman-Chase Date: Mon, 21 Oct 2013 16:04:47 +0100 Subject: [PATCH 037/256] Move decode frame to separate function --- v4l2capture.cpp | 51 +++++++++++++++++++++++++++++++++++++++---------- 1 file changed, 41 insertions(+), 10 deletions(-) diff --git a/v4l2capture.cpp b/v4l2capture.cpp index 87071c8..168dd8d 100644 --- a/v4l2capture.cpp +++ b/v4l2capture.cpp @@ -796,6 +796,27 @@ static PyObject *InsertHuffmanTable(PyObject *self, PyObject *args) // ********************************************************************* +int DecodeFrame(const unsigned char *data, unsigned dataLen, + const char *inPxFmt, + int width, int height, + const char *targetPxFmt, + unsigned char **buffOut, + unsigned *buffOutLen) +{ + printf("rx %d %s\n", dataLen, inPxFmt); + + if(strcmp(inPxFmt,"MJPEG")==0) + { + + } + + + return 1; +} + + +// ********************************************************************** + class SetFormatParams { public: @@ -839,6 +860,7 @@ class Device_manager_Worker_thread_args int deviceStarted; int fd; struct buffer *buffers; + int frameWidth, frameHeight; int buffer_counts; std::string pxFmt; @@ -853,6 +875,8 @@ class Device_manager_Worker_thread_args buffers = NULL; stopDeviceFlag = 0; closeDeviceFlag = 0; + frameWidth = 0; + frameHeight = 0; }; virtual ~Device_manager_Worker_thread_args() @@ -951,7 +975,13 @@ class Device_manager_Worker_thread_args return 0; } - this->DecodeFrame((const unsigned char*)this->buffers[buffer.index].start, buffer.bytesused); + unsigned char *rgbBuff = NULL; + unsigned rgbBuffLen = 0; + DecodeFrame((const unsigned char*)this->buffers[buffer.index].start, buffer.bytesused, + this->pxFmt.c_str(), + this->frameWidth, + this->frameHeight, + "RGB24", &rgbBuff, &rgbBuffLen); //PyObject *out = result; @@ -972,12 +1002,6 @@ class Device_manager_Worker_thread_args return 1; } - int DecodeFrame(const unsigned char *data, unsigned dataLen) - { - printf("rx %d\n", dataLen); - return 1; - } - int OpenDeviceInternal() { 
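With patch 036 the manager's control calls become asynchronous: set_format only pushes a SetFormatParams request onto setFormatFlags and returns None, and the worker's Run() loop services the queues in order, opening the device first, then applying the format, then starting capture. A minimal calling sketch under stated assumptions: the module builds as v4l2capture, Device_manager takes no constructor arguments, an open() method queues OpenDevice as the openDeviceFlag handling implies, and the Python argument order for set_format follows the devarg, size_x, size_y and fmt variables in Device_manager_set_format.

    import time
    import v4l2capture

    mgr = v4l2capture.Device_manager()
    mgr.open("/dev/video0")                                 # queued for the worker thread
    ret = mgr.set_format("/dev/video0", 640, 480, "MJPEG")  # queued, no longer returns the ioctl result
    assert ret is None
    mgr.start("/dev/video0")                                # started only once the format queue is empty
    time.sleep(0.1)                                         # give the worker time to drain its queues

The ordering guarantee comes from Run() itself, which refuses to start the device while openDeviceFlag or setFormatFlags still hold pending requests.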
printf("OpenDeviceInternal\n"); @@ -996,9 +1020,9 @@ class Device_manager_Worker_thread_args int SetFormatInternal(class SetFormatParams &args) { + printf("SetFormatInternal\n"); //int size_x, int size_y, const char *fmt; - pthread_mutex_lock(&this->lock); struct v4l2_format format; format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; format.fmt.pix.width = args.width; @@ -1019,13 +1043,13 @@ class Device_manager_Worker_thread_args if(my_ioctl(this->fd, VIDIOC_S_FMT, &format)) { - pthread_mutex_unlock(&this->lock); return 0; } //Store pixel format for decoding usage later this->pxFmt = args.fmt; - pthread_mutex_unlock(&this->lock); + this->frameWidth = args.width; + this->frameHeight = args.height; return 1; } @@ -1053,6 +1077,13 @@ class Device_manager_Worker_thread_args // calling 'start' if v4l2capture is compiled with libv4l2. Otherwise // raises IOError. + if(this->pxFmt.length()==0) + { + //Get current pixel format + //TODO + throw std::runtime_error("Set format before starting"); + } + struct v4l2_requestbuffers reqbuf; reqbuf.count = buffer_count; reqbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; From 83e31a04241d447238f8b0ae8756c80db000a6eb Mon Sep 17 00:00:00 2001 From: Tim Sheerman-Chase Date: Mon, 21 Oct 2013 16:18:26 +0100 Subject: [PATCH 038/256] Insert huffman table --- v4l2capture.cpp | 82 +++++++++++++++++++++++++++---------------------- 1 file changed, 46 insertions(+), 36 deletions(-) diff --git a/v4l2capture.cpp b/v4l2capture.cpp index 168dd8d..d2f4653 100644 --- a/v4l2capture.cpp +++ b/v4l2capture.cpp @@ -712,40 +712,13 @@ int ReadJpegFrame(const unsigned char *data, unsigned offset, const unsigned cha return 1; } -static PyObject *InsertHuffmanTable(PyObject *self, PyObject *args) +int InsertHuffmanTableCTypes(const unsigned char* inBufferPtr, unsigned inBufferLen, std::string &outBuffer) { - /* This converts an MJPEG frame into a standard JPEG binary - MJPEG images omit the huffman table if the standard table - is used. If it is missing, this function adds the table - into the file structure. 
*/ - - if(PyTuple_Size(args) < 1) - { - PyErr_BadArgument(); - PyErr_Format(PyExc_TypeError, "Function requires 1 argument"); - Py_RETURN_NONE; - } - - PyObject *inBuffer = PyTuple_GetItem(args, 0); - - if(!PyString_Check(inBuffer)) - { - PyErr_BadArgument(); - PyErr_Format(PyExc_TypeError, "Argument 1 must be a string."); - //PyObject* type = PyObject_Type(inBuffer); - //PyObject_Print(type, stdout, Py_PRINT_RAW); - //Py_CLEAR(type); - - Py_RETURN_NONE; - } - int parsing = 1; unsigned frameStartPos = 0; int huffFound = 0; - unsigned char* inBufferPtr = (unsigned char*)PyString_AsString(inBuffer); - Py_ssize_t inBufferLen = PyString_Size(inBuffer); - PyObject *outBuffer = PyString_FromString(""); + outBuffer.clear(); while(parsing) { @@ -768,15 +741,13 @@ static PyObject *InsertHuffmanTable(PyObject *self, PyObject *args) //Stop if there is a serious error if(!ok) { - parsing = 0; - continue; + return 0; } //Check if this segment is the compressed data if(twoBytes[0] == 0xff && twoBytes[1] == 0xda && !huffFound) { - PyObject *substr = PyString_FromStringAndSize(huffmanSegment, HUFFMAN_SEGMENT_LEN); - PyString_ConcatAndDel(&outBuffer, substr); + outBuffer.append(huffmanSegment, HUFFMAN_SEGMENT_LEN); } //Check the type of frame @@ -784,14 +755,50 @@ static PyObject *InsertHuffmanTable(PyObject *self, PyObject *args) huffFound = 1; //Write current structure to output - PyObject *substr = PyString_FromStringAndSize((char *)&inBufferPtr[frameStartPos], frameEndPos - frameStartPos); - PyString_ConcatAndDel(&outBuffer, substr); + outBuffer.append((char *)&inBufferPtr[frameStartPos], frameEndPos - frameStartPos); //Move cursor frameStartPos = frameEndPos; } + return 1; +} + +static PyObject *InsertHuffmanTable(PyObject *self, PyObject *args) +{ + /* This converts an MJPEG frame into a standard JPEG binary + MJPEG images omit the huffman table if the standard table + is used. If it is missing, this function adds the table + into the file structure. 
*/ + + if(PyTuple_Size(args) < 1) + { + PyErr_BadArgument(); + PyErr_Format(PyExc_TypeError, "Function requires 1 argument"); + Py_RETURN_NONE; + } + + PyObject *inBuffer = PyTuple_GetItem(args, 0); - return outBuffer; + if(!PyString_Check(inBuffer)) + { + PyErr_BadArgument(); + PyErr_Format(PyExc_TypeError, "Argument 1 must be a string."); + //PyObject* type = PyObject_Type(inBuffer); + //PyObject_Print(type, stdout, Py_PRINT_RAW); + //Py_CLEAR(type); + + Py_RETURN_NONE; + } + + unsigned char* inBufferPtr = (unsigned char*)PyString_AsString(inBuffer); + Py_ssize_t inBufferLen = PyString_Size(inBuffer); + std::string outBuffer; + + InsertHuffmanTableCTypes((unsigned char*)inBufferPtr, inBufferLen, outBuffer); + + PyObject *outBufferPy = PyString_FromStringAndSize(outBuffer.c_str(), outBuffer.length()); + + return outBufferPy; } // ********************************************************************* @@ -807,7 +814,10 @@ int DecodeFrame(const unsigned char *data, unsigned dataLen, if(strcmp(inPxFmt,"MJPEG")==0) { + std::string jpegBin; + InsertHuffmanTableCTypes(data, dataLen, jpegBin); + } From f5b30e925406c2c0a53f377915cb221fd5f4c314 Mon Sep 17 00:00:00 2001 From: Tim Sheerman-Chase Date: Mon, 21 Oct 2013 16:53:37 +0100 Subject: [PATCH 039/256] Using libjpeg but now segfaults --- setup.py | 2 +- v4l2capture.cpp | 84 ++++++++++++++++++++++++++++++++++++++++++++----- 2 files changed, 78 insertions(+), 8 deletions(-) diff --git a/setup.py b/setup.py index f05492f..4ac9448 100755 --- a/setup.py +++ b/setup.py @@ -25,5 +25,5 @@ "License :: Public Domain", "Programming Language :: C++"], ext_modules = [ - Extension("v4l2capture", ["v4l2capture.cpp"], libraries = ["v4l2", "pthread"])]) + Extension("v4l2capture", ["v4l2capture.cpp"], libraries = ["v4l2", "pthread", "jpeg"])]) diff --git a/v4l2capture.cpp b/v4l2capture.cpp index d2f4653..e0b467c 100644 --- a/v4l2capture.cpp +++ b/v4l2capture.cpp @@ -18,6 +18,7 @@ #include #include #include +#include #ifdef USE_LIBV4L #include @@ -98,8 +99,8 @@ static int my_ioctl(int fd, int request, void *arg, int utimeout = -1) FD_SET (fd, &fds); struct timeval tv; - tv.tv_sec = 0; - tv.tv_usec = utimeout; + tv.tv_sec = 0; + tv.tv_usec = utimeout; int r = select(fd+1, &fds, NULL, NULL, &tv); if(r == 0) @@ -803,6 +804,75 @@ static PyObject *InsertHuffmanTable(PyObject *self, PyObject *args) // ********************************************************************* +int ReadJpegFile(unsigned char * inbuffer, + unsigned long insize) +{ + /* This struct contains the JPEG decompression parameters and pointers to + * working space (which is allocated as needed by the JPEG library). + */ + struct jpeg_decompress_struct cinfo; + + /* More stuff */ + JSAMPARRAY buffer; /* Output row buffer */ + int row_stride; /* physical row width in output buffer */ + + /* Step 1: initialize the JPEG decompression object. 
*/ + jpeg_create_decompress(&cinfo); + + /* Step 2: specify data source */ + jpeg_mem_src(&cinfo, inbuffer, insize); + + printf("1 %ld %d\n", (long)inbuffer, insize); + /* Step 3: read file parameters with jpeg_read_header() */ + jpeg_read_header(&cinfo, TRUE); + + /* Step 4: set parameters for decompression */ + //Optional + printf("1b\n"); + + /* Step 5: Start decompressor */ + jpeg_start_decompress(&cinfo); + printf("2\n"); + /* JSAMPLEs per row in output buffer */ + row_stride = cinfo.output_width * cinfo.output_components; + /* Make a one-row-high sample array that will go away when done with image */ + buffer = (*cinfo.mem->alloc_sarray) + ((j_common_ptr) &cinfo, JPOOL_IMAGE, row_stride, 1); + + /* Step 6: while (scan lines remain to be read) */ + /* jpeg_read_scanlines(...); */ + + /* Here we use the library's state variable cinfo.output_scanline as the + * loop counter, so that we don't have to keep track ourselves. + */ + while (cinfo.output_scanline < cinfo.output_height) { + /* jpeg_read_scanlines expects an array of pointers to scanlines. + * Here the array is only one element long, but you could ask for + * more than one scanline at a time if that's more convenient. + */ + jpeg_read_scanlines(&cinfo, buffer, 1); + /* Assume put_scanline_someplace wants a pointer and sample count. */ + //put_scanline_someplace(buffer[0], row_stride); + printf("%d\n",row_stride); + } + + /* Step 7: Finish decompression */ + jpeg_finish_decompress(&cinfo); + + /* Step 8: Release JPEG decompression object */ + + /* This is an important step since it will release a good deal of memory. */ + jpeg_destroy_decompress(&cinfo); + + /* At this point you may want to check to see whether any corrupt-data + * warnings occurred (test whether jerr.pub.num_warnings is nonzero). 
+ */ + + return 1; +} + +// ********************************************************************* + int DecodeFrame(const unsigned char *data, unsigned dataLen, const char *inPxFmt, int width, int height, @@ -816,11 +886,11 @@ int DecodeFrame(const unsigned char *data, unsigned dataLen, { std::string jpegBin; InsertHuffmanTableCTypes(data, dataLen, jpegBin); - - + printf("a\n"); + ReadJpegFile((unsigned char*)jpegBin.c_str(), jpegBin.length()); + printf("b\n"); } - return 1; } @@ -1045,8 +1115,8 @@ class Device_manager_Worker_thread_args format.fmt.pix.pixelformat = V4L2_PIX_FMT_RGB24; if(strcmp(args.fmt.c_str(), "YUV420")==0) format.fmt.pix.pixelformat = V4L2_PIX_FMT_YUV420; - //if(strcmp(args.fmt.c_str(), "YUVV")==0) - // format.fmt.pix.pixelformat = V4L2_PIX_FMT_YUVV; + if(strcmp(args.fmt.c_str(), "YUYV")==0) + format.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV; format.fmt.pix.field = V4L2_FIELD_NONE; format.fmt.pix.bytesperline = 0; From ace61987f9a565f4c959f5b9f94440f01de7d425 Mon Sep 17 00:00:00 2001 From: Tim Sheerman-Chase Date: Mon, 21 Oct 2013 17:12:23 +0100 Subject: [PATCH 040/256] MJpeg appears to decode --- v4l2capture.cpp | 15 +++++++++------ 1 file changed, 9 insertions(+), 6 deletions(-) diff --git a/v4l2capture.cpp b/v4l2capture.cpp index e0b467c..f996331 100644 --- a/v4l2capture.cpp +++ b/v4l2capture.cpp @@ -804,6 +804,12 @@ static PyObject *InsertHuffmanTable(PyObject *self, PyObject *args) // ********************************************************************* +struct my_error_mgr { + struct jpeg_error_mgr pub; /* "public" fields */ + + jmp_buf setjmp_buffer; /* for return to caller */ +}; + int ReadJpegFile(unsigned char * inbuffer, unsigned long insize) { @@ -811,28 +817,27 @@ int ReadJpegFile(unsigned char * inbuffer, * working space (which is allocated as needed by the JPEG library). */ struct jpeg_decompress_struct cinfo; + struct my_error_mgr jerr; /* More stuff */ JSAMPARRAY buffer; /* Output row buffer */ int row_stride; /* physical row width in output buffer */ /* Step 1: initialize the JPEG decompression object. */ + cinfo.err = jpeg_std_error(&jerr.pub); jpeg_create_decompress(&cinfo); /* Step 2: specify data source */ jpeg_mem_src(&cinfo, inbuffer, insize); - printf("1 %ld %d\n", (long)inbuffer, insize); /* Step 3: read file parameters with jpeg_read_header() */ jpeg_read_header(&cinfo, TRUE); /* Step 4: set parameters for decompression */ //Optional - printf("1b\n"); /* Step 5: Start decompressor */ jpeg_start_decompress(&cinfo); - printf("2\n"); /* JSAMPLEs per row in output buffer */ row_stride = cinfo.output_width * cinfo.output_components; /* Make a one-row-high sample array that will go away when done with image */ @@ -853,7 +858,7 @@ int ReadJpegFile(unsigned char * inbuffer, jpeg_read_scanlines(&cinfo, buffer, 1); /* Assume put_scanline_someplace wants a pointer and sample count. 
*/ //put_scanline_someplace(buffer[0], row_stride); - printf("%d\n",row_stride); + //printf("%d\n", cinfo.output_scanline); } /* Step 7: Finish decompression */ @@ -886,9 +891,7 @@ int DecodeFrame(const unsigned char *data, unsigned dataLen, { std::string jpegBin; InsertHuffmanTableCTypes(data, dataLen, jpegBin); - printf("a\n"); ReadJpegFile((unsigned char*)jpegBin.c_str(), jpegBin.length()); - printf("b\n"); } return 1; From 3790e097846ed5a0165b371e725793e2cd654b7c Mon Sep 17 00:00:00 2001 From: Tim Sheerman-Chase Date: Mon, 21 Oct 2013 18:05:14 +0100 Subject: [PATCH 041/256] Return image to python --- v4l2capture.cpp | 157 +++++++++++++++++++++++++++++++++++++++++++----- 1 file changed, 142 insertions(+), 15 deletions(-) diff --git a/v4l2capture.cpp b/v4l2capture.cpp index f996331..f254492 100644 --- a/v4l2capture.cpp +++ b/v4l2capture.cpp @@ -804,20 +804,29 @@ static PyObject *InsertHuffmanTable(PyObject *self, PyObject *args) // ********************************************************************* -struct my_error_mgr { - struct jpeg_error_mgr pub; /* "public" fields */ +struct my_error_mgr +{ + struct jpeg_error_mgr pub; /* "public" fields */ - jmp_buf setjmp_buffer; /* for return to caller */ + jmp_buf setjmp_buffer; /* for return to caller */ }; int ReadJpegFile(unsigned char * inbuffer, - unsigned long insize) + unsigned long insize, + unsigned char **outBuffer, + unsigned *outBufferSize, + int *widthOut, int *heightOut, int *channelsOut) { /* This struct contains the JPEG decompression parameters and pointers to * working space (which is allocated as needed by the JPEG library). */ struct jpeg_decompress_struct cinfo; struct my_error_mgr jerr; + *outBuffer = NULL; + *outBufferSize = 0; + *widthOut = 0; + *heightOut = 0; + *channelsOut = 0; /* More stuff */ JSAMPARRAY buffer; /* Output row buffer */ @@ -833,6 +842,12 @@ int ReadJpegFile(unsigned char * inbuffer, /* Step 3: read file parameters with jpeg_read_header() */ jpeg_read_header(&cinfo, TRUE); + *outBufferSize = cinfo.image_width * cinfo.image_height * cinfo.num_components; + *outBuffer = new unsigned char[*outBufferSize]; + *widthOut = cinfo.image_width; + *heightOut = cinfo.image_height; + *channelsOut = cinfo.num_components; + /* Step 4: set parameters for decompression */ //Optional @@ -858,7 +873,12 @@ int ReadJpegFile(unsigned char * inbuffer, jpeg_read_scanlines(&cinfo, buffer, 1); /* Assume put_scanline_someplace wants a pointer and sample count. 
*/ //put_scanline_someplace(buffer[0], row_stride); - //printf("%d\n", cinfo.output_scanline); + assert(row_stride = cinfo.image_width * cinfo.num_components); + //printf("%ld\n", (long)buffer); + //printf("%ld\n", (long)buffer[0]); + //printf("%d %d\n", (cinfo.output_scanline-1) * row_stride, *outBufferSize); + //printf("%ld %ld\n", (long)outBuffer, (long)&outBuffer[(cinfo.output_scanline-1) * row_stride]); + memcpy(&(*outBuffer)[(cinfo.output_scanline-1) * row_stride], buffer[0], row_stride); } /* Step 7: Finish decompression */ @@ -886,12 +906,34 @@ int DecodeFrame(const unsigned char *data, unsigned dataLen, unsigned *buffOutLen) { printf("rx %d %s\n", dataLen, inPxFmt); + *buffOut = NULL; + *buffOutLen = 0; if(strcmp(inPxFmt,"MJPEG")==0) { std::string jpegBin; InsertHuffmanTableCTypes(data, dataLen, jpegBin); - ReadJpegFile((unsigned char*)jpegBin.c_str(), jpegBin.length()); + + unsigned char *decodedBuff = NULL; + unsigned decodedBuffSize = 0; + int widthActual = 0, heightActual = 0, channelsActual = 0; + + ReadJpegFile((unsigned char*)jpegBin.c_str(), jpegBin.length(), + &decodedBuff, + &decodedBuffSize, + &widthActual, &heightActual, &channelsActual); + + if(widthActual == width && heightActual == height) + { + assert(channelsActual == 3); + *buffOut = decodedBuff; + *buffOutLen = decodedBuffSize; + } + else + { + delete [] decodedBuff; + throw std::runtime_error("Decoded jpeg has unexpected size"); + } } return 1; @@ -947,6 +989,10 @@ class Device_manager_Worker_thread_args int buffer_counts; std::string pxFmt; + std::vector decodedFrameBuff; + std::vector decodedFrameLenBuff; + int decodedFrameBuffMaxSize; + Device_manager_Worker_thread_args(const char *devNameIn) { stop = 0; @@ -960,7 +1006,8 @@ class Device_manager_Worker_thread_args closeDeviceFlag = 0; frameWidth = 0; frameHeight = 0; - }; + decodedFrameBuffMaxSize = 10; + } virtual ~Device_manager_Worker_thread_args() { @@ -977,15 +1024,21 @@ class Device_manager_Worker_thread_args if(buffers) delete [] buffers; this->buffers = NULL; + for(unsigned int i=0; idecodedFrameBuff[i]; + } + this->decodedFrameBuff.clear(); + pthread_mutex_destroy(&lock); - }; + } void Stop() { pthread_mutex_lock(&this->lock); this->stop = 1; pthread_mutex_unlock(&this->lock); - }; + } void WaitForStop() { @@ -998,14 +1051,14 @@ class Device_manager_Worker_thread_args if(s) return; usleep(10000); } - }; + } void OpenDevice() { pthread_mutex_lock(&this->lock); this->openDeviceFlag.push_back(this->devName.c_str()); pthread_mutex_unlock(&this->lock); - }; + } void SetFormat(const char *fmt, int width, int height) { @@ -1024,21 +1077,42 @@ class Device_manager_Worker_thread_args pthread_mutex_lock(&this->lock); this->startDeviceFlag.push_back(buffer_count); pthread_mutex_unlock(&this->lock); - }; + } void StopDevice() { pthread_mutex_lock(&this->lock); this->stopDeviceFlag = 1; pthread_mutex_unlock(&this->lock); - }; + } void CloseDevice() { pthread_mutex_lock(&this->lock); this->closeDeviceFlag = 1; pthread_mutex_unlock(&this->lock); - }; + } + + int GetFrame(unsigned char **buffOut, unsigned *buffLenOut) + { + pthread_mutex_lock(&this->lock); + if(this->decodedFrameBuff.size()==0) + { + //No frame found + *buffOut = NULL; + *buffLenOut = 0; + pthread_mutex_unlock(&this->lock); + return 0; + } + + //Return frame + *buffOut = this->decodedFrameBuff[0]; + *buffLenOut = this->decodedFrameLenBuff[0]; + this->decodedFrameBuff.erase(this->decodedFrameBuff.begin()); + this->decodedFrameLenBuff.erase(this->decodedFrameLenBuff.begin()); + 
pthread_mutex_unlock(&this->lock); + return 1; + } protected: int ReadFrame() @@ -1060,12 +1134,36 @@ class Device_manager_Worker_thread_args unsigned char *rgbBuff = NULL; unsigned rgbBuffLen = 0; - DecodeFrame((const unsigned char*)this->buffers[buffer.index].start, buffer.bytesused, + int ok = DecodeFrame((const unsigned char*)this->buffers[buffer.index].start, buffer.bytesused, this->pxFmt.c_str(), this->frameWidth, this->frameHeight, "RGB24", &rgbBuff, &rgbBuffLen); + if(ok) + { + if(rgbBuff != NULL) + { + pthread_mutex_lock(&this->lock); + this->decodedFrameBuff.push_back(rgbBuff); + this->decodedFrameLenBuff.push_back(rgbBuffLen); + while(this->decodedFrameBuff.size() > this->decodedFrameBuffMaxSize) + { + this->decodedFrameBuff.erase(this->decodedFrameBuff.begin()); + this->decodedFrameLenBuff.erase(this->decodedFrameLenBuff.begin()); + } + pthread_mutex_unlock(&this->lock); + } + } + else + { + if(rgbBuff != NULL) + { + delete [] rgbBuff; + rgbBuff = NULL; + } + } + //PyObject *out = result; if(1) @@ -1480,6 +1578,32 @@ static PyObject *Device_manager_Start(Device_manager *self, PyObject *args) Py_RETURN_NONE; } +static PyObject *Device_manager_Get_frame(Device_manager *self, PyObject *args) +{ + + //Process arguments + const char *devarg = "/dev/video0"; + if(PyTuple_Size(args) >= 1) + { + PyObject *pydevarg = PyTuple_GetItem(args, 0); + devarg = PyString_AsString(pydevarg); + } + + class Device_manager_Worker_thread_args *threadArgs = (*self->threadArgStore)[devarg]; + unsigned char *buffOut = NULL; + unsigned buffLenOut = 0; + + int ok = threadArgs->GetFrame(&buffOut, &buffLenOut); + if(ok && buffOut != NULL) + { + PyObject *out = PyByteArray_FromStringAndSize((char *)buffOut, buffLenOut); + delete [] buffOut; + return out; + } + + Py_RETURN_NONE; +} + static PyObject *Device_manager_stop(Device_manager *self, PyObject *args) { //Process arguments @@ -1600,6 +1724,9 @@ static PyMethodDef Device_manager_methods[] = { {"start", (PyCFunction)Device_manager_Start, METH_VARARGS, "start(dev = '\\dev\\video0', reqSize=(640, 480), reqFps = 30, fmt = 'MJPEG\', buffer_count = 10)\n\n" "Start video capture."}, + {"get_frame", (PyCFunction)Device_manager_Get_frame, METH_VARARGS, + "start(dev = '\\dev\\video0'\n\n" + "Get video frame."}, {"stop", (PyCFunction)Device_manager_stop, METH_VARARGS, "stop(dev = '\\dev\\video0')\n\n" "Stop video capture."}, From 73ff7706043cc202d05162228b49402c8c079b05 Mon Sep 17 00:00:00 2001 From: Tim Sheerman-Chase Date: Mon, 21 Oct 2013 18:29:06 +0100 Subject: [PATCH 042/256] Remove or make debug statements conditional --- v4l2capture.cpp | 24 +++++++++++++----------- 1 file changed, 13 insertions(+), 11 deletions(-) diff --git a/v4l2capture.cpp b/v4l2capture.cpp index f254492..7d691d2 100644 --- a/v4l2capture.cpp +++ b/v4l2capture.cpp @@ -905,7 +905,7 @@ int DecodeFrame(const unsigned char *data, unsigned dataLen, unsigned char **buffOut, unsigned *buffOutLen) { - printf("rx %d %s\n", dataLen, inPxFmt); + //printf("rx %d %s\n", dataLen, inPxFmt); *buffOut = NULL; *buffOutLen = 0; @@ -988,6 +988,7 @@ class Device_manager_Worker_thread_args int frameWidth, frameHeight; int buffer_counts; std::string pxFmt; + int verbose; std::vector decodedFrameBuff; std::vector decodedFrameLenBuff; @@ -1007,6 +1008,7 @@ class Device_manager_Worker_thread_args frameWidth = 0; frameHeight = 0; decodedFrameBuffMaxSize = 10; + verbose = 0; } virtual ~Device_manager_Worker_thread_args() @@ -1185,7 +1187,7 @@ class Device_manager_Worker_thread_args int OpenDeviceInternal() { - 
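Patch 041 gives decoded frames a route back to Python: ReadFrame queues RGB24 buffers in decodedFrameBuff and get_frame hands the oldest one back as a bytearray, or returns None while nothing has been decoded yet. A rough polling sketch, using the same assumed setup as the previous snippet; at this point in the series the call returns the bare pixel buffer, the metadata tuple only appears in patch 048.

    import time
    import v4l2capture

    mgr = v4l2capture.Device_manager()
    mgr.open("/dev/video0")
    mgr.set_format("/dev/video0", 640, 480, "MJPEG")
    mgr.start("/dev/video0")

    frame = None
    while frame is None:                      # decoding happens on the worker thread
        frame = mgr.get_frame("/dev/video0")
        if frame is None:
            time.sleep(0.01)

    print len(frame)                          # 640 * 480 * 3 bytes once an MJPEG frame decodes

    mgr.stop("/dev/video0")
    mgr.close("/dev/video0")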
printf("OpenDeviceInternal\n"); + if(verbose) printf("OpenDeviceInternal\n"); //Open the video device. this->fd = v4l2_open(this->devName.c_str(), O_RDWR | O_NONBLOCK); @@ -1195,13 +1197,13 @@ class Device_manager_Worker_thread_args } this->deviceStarted = 0; - printf("Done opening\n"); + if(verbose) printf("Done opening\n"); return 1; } int SetFormatInternal(class SetFormatParams &args) { - printf("SetFormatInternal\n"); + if(verbose) printf("SetFormatInternal\n"); //int size_x, int size_y, const char *fmt; struct v4l2_format format; @@ -1236,7 +1238,7 @@ class Device_manager_Worker_thread_args int StartDeviceInternal(int buffer_count = 10) { - printf("StartDeviceInternal\n"); + if(verbose) printf("StartDeviceInternal\n"); //Check this device has not already been start if(this->fd==-1) { @@ -1337,13 +1339,13 @@ class Device_manager_Worker_thread_args } this->deviceStarted = 1; - printf("Started ok\n"); + if(verbose) printf("Started ok\n"); return 1; } void StopDeviceInternal() { - printf("StopDeviceInternal\n"); + if(verbose) printf("StopDeviceInternal\n"); if(this->fd==-1) { throw std::runtime_error("Device not started"); @@ -1363,7 +1365,7 @@ class Device_manager_Worker_thread_args int CloseDeviceInternal() { - printf("CloseDeviceInternal\n"); + if(verbose) printf("CloseDeviceInternal\n"); if(this->fd == -1) { throw std::runtime_error("Device not open"); @@ -1388,7 +1390,7 @@ class Device_manager_Worker_thread_args public: void Run() { - printf("Thread started: %s\n", this->devName.c_str()); + if(verbose) printf("Thread started: %s\n", this->devName.c_str()); int running = 1; pthread_mutex_lock(&this->lock); this->stopped = 0; @@ -1463,10 +1465,10 @@ class Device_manager_Worker_thread_args } catch(std::exception &err) { - printf("An exception has occured: %s\n", err.what()); + if(verbose) printf("An exception has occured: %s\n", err.what()); } - printf("Thread stopping\n"); + if(verbose) printf("Thread stopping\n"); pthread_mutex_lock(&this->lock); this->stopped = 1; pthread_mutex_unlock(&this->lock); From 718b2918e170159314b7aaa61ba76374a3af6b72 Mon Sep 17 00:00:00 2001 From: Tim Sheerman-Chase Date: Mon, 21 Oct 2013 18:47:55 +0100 Subject: [PATCH 043/256] Improve stability of worker thread --- v4l2capture.cpp | 36 +++++++++++++++++++----------------- 1 file changed, 19 insertions(+), 17 deletions(-) diff --git a/v4l2capture.cpp b/v4l2capture.cpp index 7d691d2..e59e46d 100644 --- a/v4l2capture.cpp +++ b/v4l2capture.cpp @@ -992,7 +992,7 @@ class Device_manager_Worker_thread_args std::vector decodedFrameBuff; std::vector decodedFrameLenBuff; - int decodedFrameBuffMaxSize; + unsigned decodedFrameBuffMaxSize; Device_manager_Worker_thread_args(const char *devNameIn) { @@ -1008,7 +1008,7 @@ class Device_manager_Worker_thread_args frameWidth = 0; frameHeight = 0; decodedFrameBuffMaxSize = 10; - verbose = 0; + verbose = 1; } virtual ~Device_manager_Worker_thread_args() @@ -1374,11 +1374,14 @@ class Device_manager_Worker_thread_args if(this->deviceStarted) StopDeviceInternal(); - for(int i = 0; i < this->buffer_counts; i++) + if(this->buffers!= NULL) { - v4l2_munmap(buffers[i].start, buffers[i].length); + for(int i = 0; i < this->buffer_counts; i++) + { + v4l2_munmap(this->buffers[i].start, this->buffers[i].length); + } + delete [] this->buffers; } - delete [] this->buffers; this->buffers = NULL; //Release memory @@ -1406,15 +1409,16 @@ class Device_manager_Worker_thread_args if(deviceStarted) this->ReadFrame(); pthread_mutex_lock(&this->lock); + try + { + if(this->openDeviceFlag.size() > 0) { 
std::string devName = this->openDeviceFlag[this->openDeviceFlag.size()-1]; this->openDeviceFlag.pop_back(); this->OpenDeviceInternal(); } - pthread_mutex_unlock(&this->lock); - - pthread_mutex_lock(&this->lock); + if(this->setFormatFlags.size() > 0 && this->openDeviceFlag.size() == 0) { @@ -1422,9 +1426,7 @@ class Device_manager_Worker_thread_args this->setFormatFlags.pop_back(); this->SetFormatInternal(params); } - pthread_mutex_unlock(&this->lock); - pthread_mutex_lock(&this->lock); if(this->startDeviceFlag.size() > 0 && this->openDeviceFlag.size() == 0 && this->setFormatFlags.size() == 0) @@ -1433,9 +1435,7 @@ class Device_manager_Worker_thread_args this->startDeviceFlag.pop_back(); this->StartDeviceInternal(buffer_count); } - pthread_mutex_unlock(&this->lock); - pthread_mutex_lock(&this->lock); if(this->stopDeviceFlag && this->openDeviceFlag.size() == 0 && this->setFormatFlags.size() == 0 @@ -1444,9 +1444,7 @@ class Device_manager_Worker_thread_args this->StopDeviceInternal(); this->stopDeviceFlag = 0; } - pthread_mutex_unlock(&this->lock); - pthread_mutex_lock(&this->lock); if(this->closeDeviceFlag && this->openDeviceFlag.size() == 0 && this->setFormatFlags.size() == 0 @@ -1456,10 +1454,14 @@ class Device_manager_Worker_thread_args this->CloseDeviceInternal(); this->closeDeviceFlag = 0; } - pthread_mutex_unlock(&this->lock); - - pthread_mutex_lock(&this->lock); + running = !this->stop; + } + catch(std::exception &err) + { + if(verbose) printf("An exception has occured: %s\n", err.what()); + running = 0; + } pthread_mutex_unlock(&this->lock); } } From 0c0629474256313f0bbda51f10441c609e5d7792 Mon Sep 17 00:00:00 2001 From: Tim Sheerman-Chase Date: Mon, 21 Oct 2013 18:56:14 +0100 Subject: [PATCH 044/256] Set format automagically --- v4l2capture.cpp | 53 ++++++++++++++++++++++++++++++++++++++++++------- 1 file changed, 46 insertions(+), 7 deletions(-) diff --git a/v4l2capture.cpp b/v4l2capture.cpp index e59e46d..a41a0aa 100644 --- a/v4l2capture.cpp +++ b/v4l2capture.cpp @@ -909,7 +909,7 @@ int DecodeFrame(const unsigned char *data, unsigned dataLen, *buffOut = NULL; *buffOutLen = 0; - if(strcmp(inPxFmt,"MJPEG")==0) + if(strcmp(inPxFmt,"MJPEG")==0 && strcmp(targetPxFmt, "RGB24")==0) { std::string jpegBin; InsertHuffmanTableCTypes(data, dataLen, jpegBin); @@ -934,9 +934,10 @@ int DecodeFrame(const unsigned char *data, unsigned dataLen, delete [] decodedBuff; throw std::runtime_error("Decoded jpeg has unexpected size"); } + return 1; } - return 1; + return 0; } @@ -1136,11 +1137,12 @@ class Device_manager_Worker_thread_args unsigned char *rgbBuff = NULL; unsigned rgbBuffLen = 0; + char targetFmt[] = "RGB24"; int ok = DecodeFrame((const unsigned char*)this->buffers[buffer.index].start, buffer.bytesused, this->pxFmt.c_str(), this->frameWidth, this->frameHeight, - "RGB24", &rgbBuff, &rgbBuffLen); + targetFmt, &rgbBuff, &rgbBuffLen); if(ok) { @@ -1159,6 +1161,7 @@ class Device_manager_Worker_thread_args } else { + if(verbose) printf("Failed to convert from %s to %s\n", this->pxFmt.c_str(), targetFmt); if(rgbBuff != NULL) { delete [] rgbBuff; @@ -1230,9 +1233,44 @@ class Device_manager_Worker_thread_args } //Store pixel format for decoding usage later - this->pxFmt = args.fmt; - this->frameWidth = args.width; - this->frameHeight = args.height; + //this->pxFmt = args.fmt; + //this->frameWidth = args.width; + //this->frameHeight = args.height; + this->GetFormatInternal(); + + return 1; + } + + int GetFormatInternal() + { + struct v4l2_format format; + format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + 
if(my_ioctl(this->fd, VIDIOC_G_FMT, &format)) + { + return 0; + } + + this->frameWidth = format.fmt.pix.width; + this->frameHeight = format.fmt.pix.height; + + switch(format.fmt.pix.pixelformat) + { + case V4L2_PIX_FMT_MJPEG: + this->pxFmt = "MJPEG"; + break; + case V4L2_PIX_FMT_RGB24: + this->pxFmt = "RGB24"; + break; + case V4L2_PIX_FMT_YUV420: + this->pxFmt = "YUV420"; + break; + case V4L2_PIX_FMT_YUYV: + this->pxFmt = "YUYV"; + break; + default: + this->pxFmt = "Unknown"; + break; + } return 1; } @@ -1264,7 +1302,8 @@ class Device_manager_Worker_thread_args { //Get current pixel format //TODO - throw std::runtime_error("Set format before starting"); + int ret = GetFormatInternal(); + if(!ret) throw std::runtime_error("Could not determine image format"); } struct v4l2_requestbuffers reqbuf; From b9edfc884e2be4ce925ca4a25413737d82e1ce71 Mon Sep 17 00:00:00 2001 From: Tim Sheerman-Chase Date: Mon, 21 Oct 2013 18:59:28 +0100 Subject: [PATCH 045/256] Remove hard coded rgb --- v4l2capture.cpp | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/v4l2capture.cpp b/v4l2capture.cpp index a41a0aa..338bdb6 100644 --- a/v4l2capture.cpp +++ b/v4l2capture.cpp @@ -990,6 +990,7 @@ class Device_manager_Worker_thread_args int buffer_counts; std::string pxFmt; int verbose; + std::string targetFmt; std::vector decodedFrameBuff; std::vector decodedFrameLenBuff; @@ -1010,6 +1011,7 @@ class Device_manager_Worker_thread_args frameHeight = 0; decodedFrameBuffMaxSize = 10; verbose = 1; + targetFmt = "RGB24"; } virtual ~Device_manager_Worker_thread_args() @@ -1137,12 +1139,11 @@ class Device_manager_Worker_thread_args unsigned char *rgbBuff = NULL; unsigned rgbBuffLen = 0; - char targetFmt[] = "RGB24"; int ok = DecodeFrame((const unsigned char*)this->buffers[buffer.index].start, buffer.bytesused, this->pxFmt.c_str(), this->frameWidth, this->frameHeight, - targetFmt, &rgbBuff, &rgbBuffLen); + this->targetFmt.c_str(), &rgbBuff, &rgbBuffLen); if(ok) { @@ -1161,7 +1162,7 @@ class Device_manager_Worker_thread_args } else { - if(verbose) printf("Failed to convert from %s to %s\n", this->pxFmt.c_str(), targetFmt); + if(verbose) printf("Failed to convert from %s to %s\n", this->pxFmt.c_str(), this->targetFmt.c_str()); if(rgbBuff != NULL) { delete [] rgbBuff; From 08e10b68dc8d4d64f7aad8e692fac35366b73adc Mon Sep 17 00:00:00 2001 From: Tim Sheerman-Chase Date: Mon, 21 Oct 2013 19:07:31 +0100 Subject: [PATCH 046/256] Add YUYV decoding --- v4l2capture.cpp | 38 ++++++++++++++++++++++++++++++++++++++ 1 file changed, 38 insertions(+) diff --git a/v4l2capture.cpp b/v4l2capture.cpp index 338bdb6..df896fa 100644 --- a/v4l2capture.cpp +++ b/v4l2capture.cpp @@ -937,6 +937,44 @@ int DecodeFrame(const unsigned char *data, unsigned dataLen, return 1; } + if(strcmp(inPxFmt,"YUYV")==0 && strcmp(targetPxFmt, "RGB24")==0) + { + // Convert buffer from YUYV to RGB. + // For the byte order, see: http://v4l2spec.bytesex.org/spec/r4339.htm + // For the color conversion, see: http://v4l2spec.bytesex.org/spec/x2123.htm + *buffOutLen = dataLen * 6 / 4; + char *rgb = new char[*buffOutLen]; + *buffOut = (unsigned char*)rgb; + + char *rgb_max = rgb + *buffOutLen; + const unsigned char *yuyv = data; + + #define CLAMP(c) ((c) <= 0 ? 0 : (c) >= 65025 ? 
255 : (c) >> 8) + while(rgb < rgb_max) + { + int u = yuyv[1] - 128; + int v = yuyv[3] - 128; + int uv = 100 * u + 208 * v; + u *= 516; + v *= 409; + + int y = 298 * (yuyv[0] - 16); + rgb[0] = CLAMP(y + v); + rgb[1] = CLAMP(y - uv); + rgb[2] = CLAMP(y + u); + + y = 298 * (yuyv[2] - 16); + rgb[3] = CLAMP(y + v); + rgb[4] = CLAMP(y - uv); + rgb[5] = CLAMP(y + u); + + rgb += 6; + yuyv += 4; + } + #undef CLAMP + return 1; + } + return 0; } From f2978134652dcdcb259e190da8d84d0c924ab5e0 Mon Sep 17 00:00:00 2001 From: Tim Sheerman-Chase Date: Mon, 21 Oct 2013 19:14:05 +0100 Subject: [PATCH 047/256] Skip conversion if not needed --- v4l2capture.cpp | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/v4l2capture.cpp b/v4l2capture.cpp index df896fa..d28c431 100644 --- a/v4l2capture.cpp +++ b/v4l2capture.cpp @@ -909,6 +909,15 @@ int DecodeFrame(const unsigned char *data, unsigned dataLen, *buffOut = NULL; *buffOutLen = 0; + if(strcmp(inPxFmt, targetPxFmt) == 0) + { + //Conversion not required, return a shallow copy + *buffOutLen = dataLen; + *buffOut = new unsigned char[dataLen]; + memcpy(*buffOut, data, dataLen); + return 1; + } + if(strcmp(inPxFmt,"MJPEG")==0 && strcmp(targetPxFmt, "RGB24")==0) { std::string jpegBin; From 3ddabe4ac0a1d8a12e0d303844a8554535024f23 Mon Sep 17 00:00:00 2001 From: Tim Sheerman-Chase Date: Mon, 21 Oct 2013 20:00:39 +0100 Subject: [PATCH 048/256] Pass meta data to python --- v4l2capture.cpp | 94 +++++++++++++++++++++++++++++++++++++------------ 1 file changed, 72 insertions(+), 22 deletions(-) diff --git a/v4l2capture.cpp b/v4l2capture.cpp index d28c431..06c4a81 100644 --- a/v4l2capture.cpp +++ b/v4l2capture.cpp @@ -1016,6 +1016,43 @@ class SetFormatParams } }; +class FrameMetaData +{ +public: + std::string fmt; + int width; + int height; + unsigned buffLen; + unsigned long sequence; + unsigned long tv_sec; + unsigned long tv_usec; + + FrameMetaData() + { + width = 0; + height = 0; + buffLen = 0; + sequence = 0; + tv_sec = 0; + tv_usec = 0; + } + + FrameMetaData(const FrameMetaData &in) + { + FrameMetaData::operator=(in); + } + + const FrameMetaData &operator=(const FrameMetaData &in) + { + width = in.width; + height = in.height; + fmt = in.fmt; + buffLen = in.buffLen; + return *this; + } + +}; + class Device_manager_Worker_thread_args { @@ -1040,7 +1077,7 @@ class Device_manager_Worker_thread_args std::string targetFmt; std::vector decodedFrameBuff; - std::vector decodedFrameLenBuff; + std::vector decodedFrameMetaBuff; unsigned decodedFrameBuffMaxSize; Device_manager_Worker_thread_args(const char *devNameIn) @@ -1145,23 +1182,23 @@ class Device_manager_Worker_thread_args pthread_mutex_unlock(&this->lock); } - int GetFrame(unsigned char **buffOut, unsigned *buffLenOut) + int GetFrame(unsigned char **buffOut, class FrameMetaData *metaOut) { pthread_mutex_lock(&this->lock); if(this->decodedFrameBuff.size()==0) { //No frame found *buffOut = NULL; - *buffLenOut = 0; + metaOut = NULL; pthread_mutex_unlock(&this->lock); return 0; } //Return frame *buffOut = this->decodedFrameBuff[0]; - *buffLenOut = this->decodedFrameLenBuff[0]; + *metaOut = this->decodedFrameMetaBuff[0]; this->decodedFrameBuff.erase(this->decodedFrameBuff.begin()); - this->decodedFrameLenBuff.erase(this->decodedFrameLenBuff.begin()); + this->decodedFrameMetaBuff.erase(this->decodedFrameMetaBuff.begin()); pthread_mutex_unlock(&this->lock); return 1; } @@ -1198,11 +1235,21 @@ class Device_manager_Worker_thread_args { pthread_mutex_lock(&this->lock); this->decodedFrameBuff.push_back(rgbBuff); - 
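For reference, the integer YUYV path added in patch 046 above is a fixed-point form of the usual BT.601 limited-range conversion R = 1.164(Y - 16) + 1.596(Cr - 128), G = 1.164(Y - 16) - 0.391(Cb - 128) - 0.813(Cr - 128), B = 1.164(Y - 16) + 2.018(Cb - 128): the constants 298, 409, 100, 208 and 516 are those coefficients scaled by 256, and CLAMP saturates to 0..255 while shifting the extra 8 bits back out. A small stand-alone check of that equivalence, illustrative only and not part of the patch:

    def yuv_to_rgb_float(y, cb, cr):
        r = 1.164 * (y - 16) + 1.596 * (cr - 128)
        g = 1.164 * (y - 16) - 0.391 * (cb - 128) - 0.813 * (cr - 128)
        b = 1.164 * (y - 16) + 2.018 * (cb - 128)
        return tuple(max(0, min(255, int(round(c)))) for c in (r, g, b))

    def yuv_to_rgb_fixed(y, cb, cr):
        clamp = lambda c: 0 if c <= 0 else (255 if c >= 65025 else c >> 8)
        yy = 298 * (y - 16)
        u, v = cb - 128, cr - 128
        return (clamp(yy + 409 * v), clamp(yy - 100 * u - 208 * v), clamp(yy + 516 * u))

    print yuv_to_rgb_float(81, 90, 240)   # (254, 0, 0), i.e. red
    print yuv_to_rgb_fixed(81, 90, 240)   # (255, 0, 0), matching to within rounding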
this->decodedFrameLenBuff.push_back(rgbBuffLen); + + class FrameMetaData meta; + meta.buffLen = rgbBuffLen; + meta.width = this->frameWidth; + meta.height = this->frameHeight; + meta.fmt = this->pxFmt; + meta.sequence = buffer.sequence; + meta.tv_sec = buffer.timestamp.tv_sec; + meta.tv_usec = buffer.timestamp.tv_usec; + + this->decodedFrameMetaBuff.push_back(meta); while(this->decodedFrameBuff.size() > this->decodedFrameBuffMaxSize) { this->decodedFrameBuff.erase(this->decodedFrameBuff.begin()); - this->decodedFrameLenBuff.erase(this->decodedFrameLenBuff.begin()); + this->decodedFrameMetaBuff.erase(this->decodedFrameMetaBuff.begin()); } pthread_mutex_unlock(&this->lock); } @@ -1217,20 +1264,9 @@ class Device_manager_Worker_thread_args } } - //PyObject *out = result; - - if(1) - { - /*out = PyTuple_New(4); - PyTuple_SetItem(out, 0, result); - PyTuple_SetItem(out, 1, PyInt_FromLong(buffer.timestamp.tv_sec)); - PyTuple_SetItem(out, 2, PyInt_FromLong(buffer.timestamp.tv_usec)); - PyTuple_SetItem(out, 3, PyInt_FromLong(buffer.sequence));*/ - } - if(my_ioctl(this->fd, VIDIOC_QBUF, &buffer)) { - //Py_RETURN_NONE; + throw std::runtime_error("VIDIOC_QBUF failed"); } return 1; @@ -1319,6 +1355,8 @@ class Device_manager_Worker_thread_args this->pxFmt = "Unknown"; break; } + + if(verbose) printf("Current format %s %i %i\n", this->pxFmt.c_str(), this->frameWidth, this->frameHeight); return 1; } @@ -1682,12 +1720,24 @@ static PyObject *Device_manager_Get_frame(Device_manager *self, PyObject *args) class Device_manager_Worker_thread_args *threadArgs = (*self->threadArgStore)[devarg]; unsigned char *buffOut = NULL; - unsigned buffLenOut = 0; + class FrameMetaData metaOut; - int ok = threadArgs->GetFrame(&buffOut, &buffLenOut); + int ok = threadArgs->GetFrame(&buffOut, &metaOut); if(ok && buffOut != NULL) { - PyObject *out = PyByteArray_FromStringAndSize((char *)buffOut, buffLenOut); + //Format output to python + PyObject *pymeta = PyDict_New(); + PyDict_SetItemString(pymeta, "width", PyInt_FromLong(metaOut.width)); + PyDict_SetItemString(pymeta, "height", PyInt_FromLong(metaOut.height)); + PyDict_SetItemString(pymeta, "format", PyString_FromString(metaOut.fmt.c_str())); + PyDict_SetItemString(pymeta, "sequence", PyInt_FromLong(metaOut.sequence)); + PyDict_SetItemString(pymeta, "tv_sec", PyInt_FromLong(metaOut.tv_sec)); + PyDict_SetItemString(pymeta, "tv_usec", PyInt_FromLong(metaOut.tv_usec)); + + PyObject *out = PyTuple_New(2); + PyTuple_SetItem(out, 0, PyByteArray_FromStringAndSize((char *)buffOut, metaOut.buffLen)); + PyTuple_SetItem(out, 1, pymeta); + delete [] buffOut; return out; } From bdbbf7ce6dc3f99500bb7164aa9c5ae04ed9d68e Mon Sep 17 00:00:00 2001 From: Tim Sheerman-Chase Date: Mon, 21 Oct 2013 20:05:16 +0100 Subject: [PATCH 049/256] Fix meta data --- v4l2capture.cpp | 3 +++ 1 file changed, 3 insertions(+) diff --git a/v4l2capture.cpp b/v4l2capture.cpp index 06c4a81..8ee91a2 100644 --- a/v4l2capture.cpp +++ b/v4l2capture.cpp @@ -1048,6 +1048,9 @@ class FrameMetaData height = in.height; fmt = in.fmt; buffLen = in.buffLen; + sequence = in.sequence; + tv_sec = in.tv_sec; + tv_usec = in.tv_usec; return *this; } From fc5bcbe88e57e306ed820af55c73bca318651253 Mon Sep 17 00:00:00 2001 From: Tim Sheerman-Chase Date: Mon, 21 Oct 2013 20:15:26 +0100 Subject: [PATCH 050/256] If frame cannot be decoded, return encoded frame --- v4l2capture.cpp | 57 ++++++++++++++++++++++++------------------------- 1 file changed, 28 insertions(+), 29 deletions(-) diff --git a/v4l2capture.cpp b/v4l2capture.cpp index 
8ee91a2..77aac39 100644 --- a/v4l2capture.cpp +++ b/v4l2capture.cpp @@ -1232,41 +1232,40 @@ class Device_manager_Worker_thread_args this->frameHeight, this->targetFmt.c_str(), &rgbBuff, &rgbBuffLen); - if(ok) + //Return a frame, decoded or not + pthread_mutex_lock(&this->lock); + + class FrameMetaData meta; + meta.width = this->frameWidth; + meta.height = this->frameHeight; + if(ok && rgbBuff != NULL) { - if(rgbBuff != NULL) - { - pthread_mutex_lock(&this->lock); - this->decodedFrameBuff.push_back(rgbBuff); - - class FrameMetaData meta; - meta.buffLen = rgbBuffLen; - meta.width = this->frameWidth; - meta.height = this->frameHeight; - meta.fmt = this->pxFmt; - meta.sequence = buffer.sequence; - meta.tv_sec = buffer.timestamp.tv_sec; - meta.tv_usec = buffer.timestamp.tv_usec; - - this->decodedFrameMetaBuff.push_back(meta); - while(this->decodedFrameBuff.size() > this->decodedFrameBuffMaxSize) - { - this->decodedFrameBuff.erase(this->decodedFrameBuff.begin()); - this->decodedFrameMetaBuff.erase(this->decodedFrameMetaBuff.begin()); - } - pthread_mutex_unlock(&this->lock); - } + meta.fmt = this->targetFmt; + meta.buffLen = rgbBuffLen; + this->decodedFrameBuff.push_back(rgbBuff); } else { - if(verbose) printf("Failed to convert from %s to %s\n", this->pxFmt.c_str(), this->targetFmt.c_str()); - if(rgbBuff != NULL) - { - delete [] rgbBuff; - rgbBuff = NULL; - } + //Make a copy of un-decodable buffer to return + unsigned char* buffOut = new unsigned char[buffer.bytesused]; + memcpy(buffOut, this->buffers[buffer.index].start, buffer.bytesused); + meta.fmt = this->pxFmt; + meta.buffLen = buffer.bytesused; + this->decodedFrameBuff.push_back(buffOut); + } + meta.sequence = buffer.sequence; + meta.tv_sec = buffer.timestamp.tv_sec; + meta.tv_usec = buffer.timestamp.tv_usec; + + this->decodedFrameMetaBuff.push_back(meta); + while(this->decodedFrameBuff.size() > this->decodedFrameBuffMaxSize) + { + this->decodedFrameBuff.erase(this->decodedFrameBuff.begin()); + this->decodedFrameMetaBuff.erase(this->decodedFrameMetaBuff.begin()); } + pthread_mutex_unlock(&this->lock); + //Queue buffer for next frame if(my_ioctl(this->fd, VIDIOC_QBUF, &buffer)) { throw std::runtime_error("VIDIOC_QBUF failed"); From 6d072e2635f2236ed9a473a49ef1eb0256d3cc2c Mon Sep 17 00:00:00 2001 From: Tim Sheerman-Chase Date: Mon, 21 Oct 2013 21:29:48 +0100 Subject: [PATCH 051/256] List available devices --- v4l2capture.cpp | 28 +++++++++++++++++++++++++++- 1 file changed, 27 insertions(+), 1 deletion(-) diff --git a/v4l2capture.cpp b/v4l2capture.cpp index 77aac39..02198ed 100644 --- a/v4l2capture.cpp +++ b/v4l2capture.cpp @@ -19,6 +19,7 @@ #include #include #include +#include #ifdef USE_LIBV4L #include @@ -1787,6 +1788,29 @@ static PyObject *Device_manager_close(Device_manager *self, PyObject *args) Py_RETURN_NONE; } +static PyObject *Device_manager_list_devices(Device_manager *self) +{ + PyObject *out = PyList_New(0); + const char dir[] = "/dev"; + DIR *dp; + struct dirent *dirp; + if((dp = opendir(dir)) == NULL) { + printf("Error(%d) opening %s\n", errno, dir); + Py_RETURN_NONE; + } + + while ((dirp = readdir(dp)) != NULL) { + if (strncmp(dirp->d_name, "video", 5) != 0) continue; + std::string tmp = "/dev/"; + tmp.append(dirp->d_name); + PyList_Append(out, PyString_FromString(tmp.c_str())); + } + closedir(dp); + + PyList_Sort(out); + return out; +} + // ********************************************************************* static PyMethodDef Video_device_methods[] = { @@ -1876,7 +1900,9 @@ static PyMethodDef Device_manager_methods[] = { 
{"close", (PyCFunction)Device_manager_close, METH_VARARGS, "close(dev = '\\dev\\video0')\n\n" "Close video device. Subsequent calls to other methods will fail."}, - + {"list_devices", (PyCFunction)Device_manager_list_devices, METH_NOARGS, + "list_devices()\n\n" + "List available capture devices."}, {NULL} }; From 0dfcea7fffb254c1051e6b9736f7a6a65a5710dd Mon Sep 17 00:00:00 2001 From: Tim Sheerman-Chase Date: Tue, 22 Oct 2013 01:22:05 +0100 Subject: [PATCH 052/256] Added YUV2 but actually need YVU420 --- v4l2capture.cpp | 92 +++++++++++++++++++++++++++++++++++++++++-------- 1 file changed, 78 insertions(+), 14 deletions(-) diff --git a/v4l2capture.cpp b/v4l2capture.cpp index 02198ed..d9759b5 100644 --- a/v4l2capture.cpp +++ b/v4l2capture.cpp @@ -14,6 +14,7 @@ #include #include #include +#include #include #include #include @@ -126,7 +127,6 @@ static int my_ioctl(int fd, int request, void *arg, int utimeout = -1) if(errno != EINTR) { - PyErr_SetFromErrno(PyExc_IOError); return 1; } usleep(1000); @@ -330,7 +330,11 @@ static PyObject *Video_device_get_format(Video_device *self) pixFormatStr = PyString_FromString("YUYV"); break; default: - pixFormatStr = PyString_FromString("Unknown"); + std::string tmp("Unknown"); + std::ostringstream oss; + oss << format.fmt.pix.pixelformat; + tmp.append(oss.str()); + pixFormatStr = PyString_FromString(tmp.c_str()); break; } PyTuple_SetItem(out, 2, pixFormatStr); @@ -985,6 +989,57 @@ int DecodeFrame(const unsigned char *data, unsigned dataLen, return 1; } + /* + //Untested code + if((strcmp(inPxFmt,"YUV2")==0 || strcmp(inPxFmt,"YVU2")==0) + && strcmp(targetPxFmt, "RGB24")==0) + { + int uoff = 1; + int voff = 3; + if(strcmp(inPxFmt,"YUV2")==0) + { + uoff = 1; + voff = 3; + } + if(strcmp(inPxFmt,"YVU2")==0) + { + uoff = 3; + voff = 1; + } + + int stride = width * 4; + int hwidth = width/2; + for(int lineNum=0; lineNum < height; lineNum++) + { + int lineOffset = lineNum * stride; + int outOffset = lineNum * width * 3; + + for(int pxPairNum=0; pxPairNum < hwidth; pxPairNum++) + { + unsigned char Y1 = data[pxPairNum * 4 + lineOffset]; + unsigned char Cb = data[pxPairNum * 4 + lineOffset + uoff]; + unsigned char Y2 = data[pxPairNum * 4 + lineOffset + 2]; + unsigned char Cr = data[pxPairNum * 4 + lineOffset + voff]; + + //ITU-R BT.601 colour conversion + double R1 = (Y1 + 1.402 * (Cr - 128)); + double G1 = (Y1 - 0.344 * (Cb - 128) - 0.714 * (Cr - 128)); + double B1 = (Y1 + 1.772 * (Cb - 128)); + double R2 = (Y2 + 1.402 * (Cr - 128)); + double G2 = (Y2 - 0.344 * (Cb - 128) - 0.714 * (Cr - 128)); + double B2 = (Y2 + 1.772 * (Cb - 128)); + + (*buffOut)[outOffset + pxPairNum * 6] = R1; + (*buffOut)[outOffset + pxPairNum * 6 + 1] = G1; + (*buffOut)[outOffset + pxPairNum * 6 + 2] = B1; + (*buffOut)[outOffset + pxPairNum * 6 + 3] = R2; + (*buffOut)[outOffset + pxPairNum * 6 + 4] = G2; + (*buffOut)[outOffset + pxPairNum * 6 + 5] = B2; + } + } + } + */ + return 0; } @@ -1308,6 +1363,8 @@ class Device_manager_Worker_thread_args format.fmt.pix.pixelformat = V4L2_PIX_FMT_RGB24; if(strcmp(args.fmt.c_str(), "YUV420")==0) format.fmt.pix.pixelformat = V4L2_PIX_FMT_YUV420; + if(strcmp(args.fmt.c_str(), "YVU420")==0) + format.fmt.pix.pixelformat = V4L2_PIX_FMT_YVU420; if(strcmp(args.fmt.c_str(), "YUYV")==0) format.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV; @@ -1351,11 +1408,18 @@ class Device_manager_Worker_thread_args case V4L2_PIX_FMT_YUV420: this->pxFmt = "YUV420"; break; + case V4L2_PIX_FMT_YVU420: + this->pxFmt = "YVU420"; + break; case V4L2_PIX_FMT_YUYV: this->pxFmt = "YUYV"; 
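Taken together, patches 048 to 051 settle the Python-facing read path: get_frame returns a (buffer, meta) pair whose dict carries width, height, format, sequence, tv_sec and tv_usec, the buffer falls back to the undecoded camera data (with meta['format'] reporting the source format such as 'MJPEG') when conversion fails, and list_devices enumerates the /dev/video* nodes. A usage sketch under the same assumptions as the earlier snippets; treating InsertHuffmanTable as exposed at module level is an extra assumption, since its registration is not shown in these patches.

    import time
    import v4l2capture

    mgr = v4l2capture.Device_manager()
    dev = mgr.list_devices()[0]              # e.g. '/dev/video0'
    mgr.open(dev)
    mgr.set_format(dev, 640, 480, "MJPEG")
    mgr.start(dev)

    result = None
    while result is None:
        result = mgr.get_frame(dev)
        if result is None:
            time.sleep(0.01)

    data, meta = result
    if meta["format"] == "RGB24":
        print meta["width"], meta["height"], meta["sequence"], meta["tv_sec"]
    elif meta["format"] == "MJPEG":
        # The worker could not decode; add the missing huffman table and save a viewable JPEG.
        open("frame.jpg", "wb").write(v4l2capture.InsertHuffmanTable(str(data)))

    mgr.stop(dev)
    mgr.close(dev)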
break; default: - this->pxFmt = "Unknown"; + this->pxFmt = "Unknown "; + std::ostringstream oss; + oss << format.fmt.pix.pixelformat; + this->pxFmt.append(oss.str()); + break; } @@ -1453,7 +1517,7 @@ class Device_manager_Worker_thread_args if(my_ioctl(fd, VIDIOC_QBUF, &buffer)) { - throw std::runtime_error("VIDIOC_QBUF failed"); + //This may fail with some devices but does not seem to be harmful. } } @@ -1792,20 +1856,20 @@ static PyObject *Device_manager_list_devices(Device_manager *self) { PyObject *out = PyList_New(0); const char dir[] = "/dev"; - DIR *dp; - struct dirent *dirp; - if((dp = opendir(dir)) == NULL) { - printf("Error(%d) opening %s\n", errno, dir); - Py_RETURN_NONE; - } - - while ((dirp = readdir(dp)) != NULL) { + DIR *dp; + struct dirent *dirp; + if((dp = opendir(dir)) == NULL) { + printf("Error(%d) opening %s\n", errno, dir); + Py_RETURN_NONE; + } + + while ((dirp = readdir(dp)) != NULL) { if (strncmp(dirp->d_name, "video", 5) != 0) continue; std::string tmp = "/dev/"; tmp.append(dirp->d_name); PyList_Append(out, PyString_FromString(tmp.c_str())); - } - closedir(dp); + } + closedir(dp); PyList_Sort(out); return out; From 119a2b30cf2349951742120f14e548408a1d428c Mon Sep 17 00:00:00 2001 From: Tim Sheerman-Chase Date: Tue, 22 Oct 2013 19:21:40 +0100 Subject: [PATCH 053/256] Remove original capture approach --- v4l2capture.cpp | 523 +----------------------------------------------- 1 file changed, 2 insertions(+), 521 deletions(-) diff --git a/v4l2capture.cpp b/v4l2capture.cpp index d9759b5..d6707d6 100644 --- a/v4l2capture.cpp +++ b/v4l2capture.cpp @@ -67,7 +67,7 @@ struct capability { const char *name; }; -static struct capability capabilities[] = { +/*static struct capability capabilities[] = { { V4L2_CAP_ASYNCIO, "asyncio" }, { V4L2_CAP_AUDIO, "audio" }, { V4L2_CAP_HW_FREQ_SEEK, "hw_freq_seek" }, @@ -84,7 +84,7 @@ static struct capability capabilities[] = { { V4L2_CAP_VIDEO_OUTPUT, "video_output" }, { V4L2_CAP_VIDEO_OUTPUT_OVERLAY, "video_output_overlay" }, { V4L2_CAP_VIDEO_OVERLAY, "video_overlay" } -}; +};*/ static int my_ioctl(int fd, int request, void *arg, int utimeout = -1) { @@ -133,451 +133,6 @@ static int my_ioctl(int fd, int request, void *arg, int utimeout = -1) } } -static void Video_device_unmap(Video_device *self) -{ - int i; - - for(i = 0; i < self->buffer_count; i++) - { - v4l2_munmap(self->buffers[i].start, self->buffers[i].length); - } - free(self->buffers); - self->buffers = NULL; -} - -static void Video_device_dealloc(Video_device *self) -{ - if(self->fd >= 0) - { - if(self->buffers) - { - Video_device_unmap(self); - } - - v4l2_close(self->fd); - } - - self->ob_type->tp_free((PyObject *)self); -} - -static int Video_device_init(Video_device *self, PyObject *args, - PyObject *kwargs) -{ - const char *device_path; - - if(!PyArg_ParseTuple(args, "s", &device_path)) - { - return -1; - } - - int fd = v4l2_open(device_path, O_RDWR | O_NONBLOCK); - - if(fd < 0) - { - PyErr_SetFromErrnoWithFilename(PyExc_IOError, (char *)device_path); - return -1; - } - - self->fd = fd; - self->buffers = NULL; - return 0; -} - -static PyObject *Video_device_close(Video_device *self) -{ - if(self->fd >= 0) - { - if(self->buffers) - { - Video_device_unmap(self); - } - - v4l2_close(self->fd); - self->fd = -1; - } - - Py_RETURN_NONE; -} - -static PyObject *Video_device_fileno(Video_device *self) -{ - ASSERT_OPEN; - return PyInt_FromLong(self->fd); -} - -static PyObject *Video_device_get_info(Video_device *self) -{ - ASSERT_OPEN; - struct v4l2_capability caps; - - 
if(my_ioctl(self->fd, VIDIOC_QUERYCAP, &caps)) - { - Py_RETURN_NONE; - } - - PyObject *set = PySet_New(NULL); - - if(!set) - { - Py_RETURN_NONE; - } - - struct capability *capability = capabilities; - - while(capability < (struct capability *)(capabilities + sizeof(capabilities))) - { - if(caps.capabilities & capability->id) - { - PyObject *s = PyString_FromString(capability->name); - - if(!s) - { - Py_DECREF(set); - Py_RETURN_NONE; - } - - PySet_Add(set, s); - } - - capability++; - } - - return Py_BuildValue("sssO", caps.driver, caps.card, caps.bus_info, set); -} - -static PyObject *Video_device_set_format(Video_device *self, PyObject *args) -{ - int size_x; - int size_y; - const char *fmt = NULL; - - if(!PyArg_ParseTuple(args, "ii|s", &size_x, &size_y, &fmt)) - { - Py_RETURN_NONE; - } - - struct v4l2_format format; - format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; - format.fmt.pix.width = size_x; - format.fmt.pix.height = size_y; -#ifdef USE_LIBV4L - format.fmt.pix.pixelformat = V4L2_PIX_FMT_RGB24; - if(fmt != NULL && strcmp(fmt, "MJPEG")==0) - format.fmt.pix.pixelformat = V4L2_PIX_FMT_MJPEG; - if(fmt != NULL && strcmp(fmt, "RGB24")==0) - format.fmt.pix.pixelformat = V4L2_PIX_FMT_RGB24; - if(fmt != NULL && strcmp(fmt, "YUV420")==0) - format.fmt.pix.pixelformat = V4L2_PIX_FMT_YUV420; -#else - format.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV; -#endif - format.fmt.pix.field = V4L2_FIELD_NONE; - format.fmt.pix.bytesperline = 0; - - if(my_ioctl(self->fd, VIDIOC_S_FMT, &format)) - { - Py_RETURN_NONE; - } - - return Py_BuildValue("ii", format.fmt.pix.width, format.fmt.pix.height); -} - -static PyObject *Video_device_set_fps(Video_device *self, PyObject *args) -{ - int fps; - if(!PyArg_ParseTuple(args, "i", &fps)) - { - Py_RETURN_NONE; - } - struct v4l2_streamparm setfps; - memset(&setfps, 0, sizeof(struct v4l2_streamparm)); - setfps.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; - setfps.parm.capture.timeperframe.numerator = 1; - setfps.parm.capture.timeperframe.denominator = fps; - if(my_ioctl(self->fd, VIDIOC_S_PARM, &setfps)){ - Py_RETURN_NONE; - } - return Py_BuildValue("i",setfps.parm.capture.timeperframe.denominator); -} - -static PyObject *Video_device_get_format(Video_device *self) -{ - - struct v4l2_format format; - format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; - if(my_ioctl(self->fd, VIDIOC_G_FMT, &format)) - { - Py_RETURN_NONE; - } - - PyObject *out = PyTuple_New(3); - PyTuple_SetItem(out, 0, PyInt_FromLong(format.fmt.pix.width)); - PyTuple_SetItem(out, 1, PyInt_FromLong(format.fmt.pix.height)); - - PyObject *pixFormatStr = NULL; - switch(format.fmt.pix.pixelformat) - { - case V4L2_PIX_FMT_MJPEG: - pixFormatStr = PyString_FromString("MJPEG"); - break; - case V4L2_PIX_FMT_RGB24: - pixFormatStr = PyString_FromString("RGB24"); - break; - case V4L2_PIX_FMT_YUV420: - pixFormatStr = PyString_FromString("YUV420"); - break; - case V4L2_PIX_FMT_YUYV: - pixFormatStr = PyString_FromString("YUYV"); - break; - default: - std::string tmp("Unknown"); - std::ostringstream oss; - oss << format.fmt.pix.pixelformat; - tmp.append(oss.str()); - pixFormatStr = PyString_FromString(tmp.c_str()); - break; - } - PyTuple_SetItem(out, 2, pixFormatStr); - return out; - -} - -static PyObject *Video_device_start(Video_device *self) -{ - ASSERT_OPEN; - enum v4l2_buf_type type; - type = V4L2_BUF_TYPE_VIDEO_CAPTURE; - - if(my_ioctl(self->fd, VIDIOC_STREAMON, &type)) - { - Py_RETURN_NONE; - } - - Py_RETURN_NONE; -} - -static PyObject *Video_device_stop(Video_device *self) -{ - ASSERT_OPEN; - enum v4l2_buf_type type; - type = 
V4L2_BUF_TYPE_VIDEO_CAPTURE; - - if(my_ioctl(self->fd, VIDIOC_STREAMOFF, &type)) - { - Py_RETURN_NONE; - } - - Py_RETURN_NONE; -} - -static PyObject *Video_device_create_buffers(Video_device *self, PyObject *args) -{ - int buffer_count; - - if(!PyArg_ParseTuple(args, "I", &buffer_count)) - { - Py_RETURN_NONE; - } - - ASSERT_OPEN; - - if(self->buffers) - { - PyErr_SetString(PyExc_ValueError, "Buffers are already created"); - Py_RETURN_NONE; - } - - struct v4l2_requestbuffers reqbuf; - reqbuf.count = buffer_count; - reqbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; - reqbuf.memory = V4L2_MEMORY_MMAP; - - if(my_ioctl(self->fd, VIDIOC_REQBUFS, &reqbuf)) - { - Py_RETURN_NONE; - } - - if(!reqbuf.count) - { - PyErr_SetString(PyExc_IOError, "Not enough buffer memory"); - Py_RETURN_NONE; - } - - self->buffers = (struct buffer *)malloc(reqbuf.count * sizeof(struct buffer)); - - if(!self->buffers) - { - PyErr_NoMemory(); - Py_RETURN_NONE; - } - - unsigned int i; - - for(i = 0; i < reqbuf.count; i++) - { - struct v4l2_buffer buffer; - buffer.index = i; - buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; - buffer.memory = V4L2_MEMORY_MMAP; - - if(my_ioctl(self->fd, VIDIOC_QUERYBUF, &buffer)) - { - Py_RETURN_NONE; - } - - self->buffers[i].length = buffer.length; - self->buffers[i].start = v4l2_mmap(NULL, buffer.length, - PROT_READ | PROT_WRITE, MAP_SHARED, self->fd, buffer.m.offset); - - if(self->buffers[i].start == MAP_FAILED) - { - PyErr_SetFromErrno(PyExc_IOError); - Py_RETURN_NONE; - } - } - - self->buffer_count = i; - Py_RETURN_NONE; -} - -static PyObject *Video_device_queue_all_buffers(Video_device *self) -{ - if(!self->buffers) - { - ASSERT_OPEN; - PyErr_SetString(PyExc_ValueError, "Buffers have not been created"); - Py_RETURN_NONE; - } - - int i; - int buffer_count = self->buffer_count; - - for(i = 0; i < buffer_count; i++) - { - struct v4l2_buffer buffer; - buffer.index = i; - buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; - buffer.memory = V4L2_MEMORY_MMAP; - - if(my_ioctl(self->fd, VIDIOC_QBUF, &buffer)) - { - Py_RETURN_NONE; - } - } - - Py_RETURN_NONE; -} - -static PyObject *Video_device_read_internal(Video_device *self, int queue, int return_timestamp) -{ - if(!self->buffers) - { - ASSERT_OPEN; - PyErr_SetString(PyExc_ValueError, "Buffers have not been created"); - Py_RETURN_NONE; - } - - struct v4l2_buffer buffer; - buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; - buffer.memory = V4L2_MEMORY_MMAP; - - if(my_ioctl(self->fd, VIDIOC_DQBUF, &buffer)) - { - Py_RETURN_NONE; - } - -#ifdef USE_LIBV4L - PyObject *result = PyString_FromStringAndSize( - (const char*)self->buffers[buffer.index].start, buffer.bytesused); - - if(!result) - { - Py_RETURN_NONE; - } -#else - // Convert buffer from YUYV to RGB. - // For the byte order, see: http://v4l2spec.bytesex.org/spec/r4339.htm - // For the color conversion, see: http://v4l2spec.bytesex.org/spec/x2123.htm - int length = buffer.bytesused * 6 / 4; - PyObject *result = PyString_FromStringAndSize(NULL, length); - - if(!result) - { - Py_RETURN_NONE; - } - - char *rgb = PyString_AS_STRING(result); - char *rgb_max = rgb + length; - unsigned char *yuyv = self->buffers[buffer.index].start; - -#define CLAMP(c) ((c) <= 0 ? 0 : (c) >= 65025 ? 
255 : (c) >> 8) - while(rgb < rgb_max) - { - int u = yuyv[1] - 128; - int v = yuyv[3] - 128; - int uv = 100 * u + 208 * v; - u *= 516; - v *= 409; - - int y = 298 * (yuyv[0] - 16); - rgb[0] = CLAMP(y + v); - rgb[1] = CLAMP(y - uv); - rgb[2] = CLAMP(y + u); - - y = 298 * (yuyv[2] - 16); - rgb[3] = CLAMP(y + v); - rgb[4] = CLAMP(y - uv); - rgb[5] = CLAMP(y + u); - - rgb += 6; - yuyv += 4; - } -#undef CLAMP -#endif - - PyObject *out = result; - - if(return_timestamp) - { - out = PyTuple_New(4); - PyTuple_SetItem(out, 0, result); - PyTuple_SetItem(out, 1, PyInt_FromLong(buffer.timestamp.tv_sec)); - PyTuple_SetItem(out, 2, PyInt_FromLong(buffer.timestamp.tv_usec)); - PyTuple_SetItem(out, 3, PyInt_FromLong(buffer.sequence)); - } - - if(queue && my_ioctl(self->fd, VIDIOC_QBUF, &buffer)) - { - Py_RETURN_NONE; - } - - return out; -} - -static PyObject *Video_device_read(Video_device *self, PyObject *args) -{ - int return_timestamp=0; - - if(!PyArg_ParseTuple(args, "|i", &return_timestamp)) - { - Py_RETURN_NONE; - } - - return Video_device_read_internal(self, 0, return_timestamp); -} - -static PyObject *Video_device_read_and_queue(Video_device *self, PyObject *args) -{ - int return_timestamp=0; - - if(!PyArg_ParseTuple(args, "|i", &return_timestamp)) - { - Py_RETURN_NONE; - } - - return Video_device_read_internal(self, 1, return_timestamp); -} - // ********************************************************************* #define HUFFMAN_SEGMENT_LEN 420 @@ -1877,72 +1432,6 @@ static PyObject *Device_manager_list_devices(Device_manager *self) // ********************************************************************* -static PyMethodDef Video_device_methods[] = { - {"close", (PyCFunction)Video_device_close, METH_NOARGS, - "close()\n\n" - "Close video device. Subsequent calls to other methods will fail."}, - {"fileno", (PyCFunction)Video_device_fileno, METH_NOARGS, - "fileno() -> integer \"file descriptor\".\n\n" - "This enables video devices to be passed select.select for waiting " - "until a frame is available for reading."}, - {"get_info", (PyCFunction)Video_device_get_info, METH_NOARGS, - "get_info() -> driver, card, bus_info, capabilities\n\n" - "Returns three strings with information about the video device, and one " - "set containing strings identifying the capabilities of the video " - "device."}, - {"set_format", (PyCFunction)Video_device_set_format, METH_VARARGS, - "set_format(size_x, size_y, pixel_format='RGB24') -> size_x, size_y\n\n" - "Request the video device to set image size and format. The device may " - "choose another size than requested and will return its choice. The " - "pixel format may be either RGB24, YUV420 or MJPEG."}, - {"get_format", (PyCFunction)Video_device_get_format, METH_NOARGS, - "get_format() -> size_x, size_y\n\n"}, - {"set_fps", (PyCFunction)Video_device_set_fps, METH_VARARGS, - "set_fps(fps) -> fps \n\n" - "Request the video device to set frame per seconds.The device may " - "choose another frame rate than requested and will return its choice. " }, - {"start", (PyCFunction)Video_device_start, METH_NOARGS, - "start()\n\n" - "Start video capture."}, - {"stop", (PyCFunction)Video_device_stop, METH_NOARGS, - "stop()\n\n" - "Stop video capture."}, - {"create_buffers", (PyCFunction)Video_device_create_buffers, METH_VARARGS, - "create_buffers(count)\n\n" - "Create buffers used for capturing image data. 
Can only be called once " - "for each video device object."}, - {"queue_all_buffers", (PyCFunction)Video_device_queue_all_buffers, - METH_NOARGS, - "queue_all_buffers()\n\n" - "Let the video device fill all buffers created."}, - {"read", (PyCFunction)Video_device_read, METH_VARARGS, - "read(get_timestamp) -> string or tuple\n\n" - "Reads image data from a buffer that has been filled by the video " - "device. The image data is in RGB24, YUV420 or MJPEG format as decided by " - "'set_format'. The buffer is removed from the queue. Fails if no buffer " - "is filled. Use select.select to check for filled buffers. If " - "get_timestamp is true, a tuple is turned containing (sec, microsec, " - "sequence number)"}, - {"read_and_queue", (PyCFunction)Video_device_read_and_queue, METH_VARARGS, - "read_and_queue(get_timestamp)\n\n" - "Same as 'read', but adds the buffer back to the queue so the video " - "device can fill it again."}, - {NULL} -}; - -static PyTypeObject Video_device_type = { - PyObject_HEAD_INIT(NULL) - 0, "v4l2capture.Video_device", sizeof(Video_device), 0, - (destructor)Video_device_dealloc, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, Py_TPFLAGS_DEFAULT, "Video_device(path)\n\nOpens the video device at " - "the given path and returns an object that can capture images. The " - "constructor and all methods except close may raise IOError.", 0, 0, 0, - 0, 0, 0, Video_device_methods, 0, 0, 0, 0, 0, 0, 0, - (initproc)Video_device_init -}; - -// ********************************************************************* - static PyMethodDef Device_manager_methods[] = { {"open", (PyCFunction)Device_manager_open, METH_VARARGS, "open(dev = '\\dev\\video0')\n\n" @@ -1990,14 +1479,8 @@ static PyMethodDef module_methods[] = { PyMODINIT_FUNC initv4l2capture(void) { - Video_device_type.tp_new = PyType_GenericNew; Device_manager_type.tp_new = PyType_GenericNew; - if(PyType_Ready(&Video_device_type) < 0) - { - return; - } - if(PyType_Ready(&Device_manager_type) < 0) { return; @@ -2011,8 +1494,6 @@ PyMODINIT_FUNC initv4l2capture(void) return; } - Py_INCREF(&Video_device_type); - PyModule_AddObject(module, "Video_device", (PyObject *)&Video_device_type); Py_INCREF(&Device_manager_type); PyModule_AddObject(module, "Device_manager", (PyObject *)&Device_manager_type); From eaa2a4b0ae1ffce9ac0009aa6aad50e36ef4460e Mon Sep 17 00:00:00 2001 From: Tim Sheerman-Chase Date: Tue, 22 Oct 2013 19:39:25 +0100 Subject: [PATCH 054/256] Adding a class for video output --- setup.py | 2 +- v4l2capture.cpp | 7 +++++++ v4l2out.cpp | 33 +++++++++++++++++++++++++++++++++ v4l2out.h | 46 ++++++++++++++++++++++++++++++++++++++++++++++ 4 files changed, 87 insertions(+), 1 deletion(-) create mode 100644 v4l2out.cpp create mode 100644 v4l2out.h diff --git a/setup.py b/setup.py index 4ac9448..57fb656 100755 --- a/setup.py +++ b/setup.py @@ -25,5 +25,5 @@ "License :: Public Domain", "Programming Language :: C++"], ext_modules = [ - Extension("v4l2capture", ["v4l2capture.cpp"], libraries = ["v4l2", "pthread", "jpeg"])]) + Extension("v4l2capture", ["v4l2capture.cpp", "v4l2out.cpp"], libraries = ["v4l2", "pthread", "jpeg"])]) diff --git a/v4l2capture.cpp b/v4l2capture.cpp index d6707d6..c28cf2e 100644 --- a/v4l2capture.cpp +++ b/v4l2capture.cpp @@ -21,6 +21,7 @@ #include #include #include +#include "v4l2out.h" #ifdef USE_LIBV4L #include @@ -1480,11 +1481,16 @@ static PyMethodDef module_methods[] = { PyMODINIT_FUNC initv4l2capture(void) { Device_manager_type.tp_new = PyType_GenericNew; + Video_out_manager_type.tp_new = PyType_GenericNew; 
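	/* Editor's aside: the hunk around this point follows the usual CPython 2.x
	 * recipe for registering an extension type: set tp_new, call PyType_Ready(),
	 * create the module, then add the type object with an extra reference
	 * (PyModule_AddObject steals one). A minimal, self-contained sketch of that
	 * recipe, with hypothetical names (Example/examplemod) that are not part of
	 * this patch:
	 *
	 *   #include <Python.h>
	 *
	 *   typedef struct { PyObject_HEAD } ExampleObject;
	 *   static PyTypeObject Example_type = { PyObject_HEAD_INIT(NULL) 0 };
	 *
	 *   PyMODINIT_FUNC initexamplemod(void)
	 *   {
	 *       Example_type.tp_name = "examplemod.Example";
	 *       Example_type.tp_basicsize = sizeof(ExampleObject);
	 *       Example_type.tp_flags = Py_TPFLAGS_DEFAULT;
	 *       Example_type.tp_new = PyType_GenericNew;
	 *       if(PyType_Ready(&Example_type) < 0)
	 *           return;                          // type could not be finalised
	 *       PyObject *module = Py_InitModule3("examplemod", NULL, "Sketch module.");
	 *       if(module == NULL)
	 *           return;
	 *       Py_INCREF(&Example_type);            // PyModule_AddObject steals a reference
	 *       PyModule_AddObject(module, "Example", (PyObject *)&Example_type);
	 *   }
	 */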
if(PyType_Ready(&Device_manager_type) < 0) { return; } + if(PyType_Ready(&Video_out_manager_type) < 0) + { + return; + } PyObject *module = Py_InitModule3("v4l2capture", module_methods, "Capture video with video4linux2."); @@ -1496,5 +1502,6 @@ PyMODINIT_FUNC initv4l2capture(void) Py_INCREF(&Device_manager_type); PyModule_AddObject(module, "Device_manager", (PyObject *)&Device_manager_type); + PyModule_AddObject(module, "Video_out_manager", (PyObject *)&Video_out_manager_type); } diff --git a/v4l2out.cpp b/v4l2out.cpp new file mode 100644 index 0000000..7e74ab8 --- /dev/null +++ b/v4l2out.cpp @@ -0,0 +1,33 @@ + +#include "v4l2out.h" + +int Video_out_manager_init(Video_out_manager *self, PyObject *args, + PyObject *kwargs) +{ + //self->threadArgStore = new std::map; + return 0; +} + +void Video_out_manager_dealloc(Video_out_manager *self) +{ + //Stop high level threads + /*for(std::map::iterator it = self->threadArgStore->begin(); + it != self->threadArgStore->end(); it++) + { + PyObject *args = PyTuple_New(1); + PyTuple_SetItem(args, 0, PyString_FromString(it->first.c_str())); + Device_manager_stop(self, args); + Py_DECREF(args); + } + + delete self->threadArgStore;*/ + self->ob_type->tp_free((PyObject *)self); +} + +PyObject *Video_out_manager_open(Video_out_manager *self, PyObject *args) +{ + + + Py_RETURN_NONE; +} + diff --git a/v4l2out.h b/v4l2out.h new file mode 100644 index 0000000..eb31512 --- /dev/null +++ b/v4l2out.h @@ -0,0 +1,46 @@ +#ifndef __V4L2OUT_H__ +#define __V4L2OUT_H__ + +#include +#include +#include +#include + +class Video_out_manager_cl{ +public: + PyObject_HEAD + //std::map *threadArgStore; +}; +typedef Video_out_manager_cl Video_out_manager; + +int Video_out_manager_init(Video_out_manager *self, PyObject *args, + PyObject *kwargs); + +void Video_out_manager_dealloc(Video_out_manager *self); + +PyObject *Video_out_manager_open(Video_out_manager *self, PyObject *args); + + +// ****************************************************************** + +static PyMethodDef Video_out_manager_methods[] = { + {"test", (PyCFunction)Video_out_manager_open, METH_VARARGS, + "test(dev = '\\dev\\video0')\n\n" + "Open video output."}, + {NULL} +}; + +static PyTypeObject Video_out_manager_type = { + PyObject_HEAD_INIT(NULL) + 0, "v4l2capture.Video_out_manager", sizeof(Video_out_manager), 0, + (destructor)Video_out_manager_dealloc, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, Py_TPFLAGS_DEFAULT, "Video_out_manager(path)\n\nOpens the video device at " + "the given path and returns an object that can capture images. 
The " + "constructor and all methods except close may raise IOError.", 0, 0, 0, + 0, 0, 0, Video_out_manager_methods, 0, 0, 0, 0, 0, 0, 0, + (initproc)Video_out_manager_init +}; + +#endif //__V4L2OUT_H__ + + From 7ae8da123829b5b48d5e28a57bfcccef6f4f3b49 Mon Sep 17 00:00:00 2001 From: Tim Sheerman-Chase Date: Tue, 22 Oct 2013 19:50:02 +0100 Subject: [PATCH 055/256] Add low level function for video out --- v4l2capture.cpp | 18 +++++++++++ v4l2out.cpp | 84 ++++++++++++++++++++++++++++++++++++++++++++++++- v4l2out.h | 18 ----------- 3 files changed, 101 insertions(+), 19 deletions(-) diff --git a/v4l2capture.cpp b/v4l2capture.cpp index c28cf2e..dfa05e2 100644 --- a/v4l2capture.cpp +++ b/v4l2capture.cpp @@ -1471,6 +1471,24 @@ static PyTypeObject Device_manager_type = { (initproc)Device_manager_init }; +static PyMethodDef Video_out_manager_methods[] = { + {"test", (PyCFunction)Video_out_manager_open, METH_VARARGS, + "test(dev = '\\dev\\video0')\n\n" + "Open video output."}, + {NULL} +}; + +static PyTypeObject Video_out_manager_type = { + PyObject_HEAD_INIT(NULL) + 0, "v4l2capture.Video_out_manager", sizeof(Video_out_manager), 0, + (destructor)Video_out_manager_dealloc, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, Py_TPFLAGS_DEFAULT, "Video_out_manager(path)\n\nOpens the video device at " + "the given path and returns an object that can capture images. The " + "constructor and all methods except close may raise IOError.", 0, 0, 0, + 0, 0, 0, Video_out_manager_methods, 0, 0, 0, 0, 0, 0, 0, + (initproc)Video_out_manager_init +}; + // ********************************************************************* static PyMethodDef module_methods[] = { diff --git a/v4l2out.cpp b/v4l2out.cpp index 7e74ab8..2538186 100644 --- a/v4l2out.cpp +++ b/v4l2out.cpp @@ -1,10 +1,92 @@ #include "v4l2out.h" + + +class Video_out +{ +public: + Video_out_manager *self; + std::string devName; + int stop; + int stopped; + pthread_mutex_t lock; + int verbose; + + Video_out(const char *devNameIn) + { + stop = 0; + stopped = 1; + verbose = 0; + this->devName = devNameIn; + pthread_mutex_init(&lock, NULL); + + } + + virtual ~Video_out() + { + + + pthread_mutex_destroy(&lock); + } + + +public: + void Run() + { + if(verbose) printf("Thread started: %s\n", this->devName.c_str()); + int running = 1; + pthread_mutex_lock(&this->lock); + this->stopped = 0; + pthread_mutex_unlock(&this->lock); + + try + { + while(running) + { + //printf("Sleep\n"); + usleep(1000); + + pthread_mutex_lock(&this->lock); + try + { + + running = !this->stop; + } + catch(std::exception &err) + { + if(verbose) printf("An exception has occured: %s\n", err.what()); + running = 0; + } + pthread_mutex_unlock(&this->lock); + } + } + catch(std::exception &err) + { + if(verbose) printf("An exception has occured: %s\n", err.what()); + } + + if(verbose) printf("Thread stopping\n"); + pthread_mutex_lock(&this->lock); + this->stopped = 1; + pthread_mutex_unlock(&this->lock); + }; +}; + +void *Video_out_manager_Worker_thread(void *arg) +{ + class Video_out *argobj = (class Video_out*) arg; + argobj->Run(); + + return NULL; +} + +// ***************************************************************** + int Video_out_manager_init(Video_out_manager *self, PyObject *args, PyObject *kwargs) { - //self->threadArgStore = new std::map; + //self->threadArgStore = new std::map; return 0; } diff --git a/v4l2out.h b/v4l2out.h index eb31512..4069abf 100644 --- a/v4l2out.h +++ b/v4l2out.h @@ -23,24 +23,6 @@ PyObject *Video_out_manager_open(Video_out_manager *self, PyObject *args); // 
****************************************************************** -static PyMethodDef Video_out_manager_methods[] = { - {"test", (PyCFunction)Video_out_manager_open, METH_VARARGS, - "test(dev = '\\dev\\video0')\n\n" - "Open video output."}, - {NULL} -}; - -static PyTypeObject Video_out_manager_type = { - PyObject_HEAD_INIT(NULL) - 0, "v4l2capture.Video_out_manager", sizeof(Video_out_manager), 0, - (destructor)Video_out_manager_dealloc, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, Py_TPFLAGS_DEFAULT, "Video_out_manager(path)\n\nOpens the video device at " - "the given path and returns an object that can capture images. The " - "constructor and all methods except close may raise IOError.", 0, 0, 0, - 0, 0, 0, Video_out_manager_methods, 0, 0, 0, 0, 0, 0, 0, - (initproc)Video_out_manager_init -}; - #endif //__V4L2OUT_H__ From 442d852ec823c5971b19a9bef5a2d771fa69e1a6 Mon Sep 17 00:00:00 2001 From: Tim Sheerman-Chase Date: Tue, 22 Oct 2013 19:56:38 +0100 Subject: [PATCH 056/256] Add open and close methods --- v4l2capture.cpp | 7 +++++-- v4l2out.cpp | 9 ++++----- v4l2out.h | 1 + 3 files changed, 10 insertions(+), 7 deletions(-) diff --git a/v4l2capture.cpp b/v4l2capture.cpp index dfa05e2..6049b21 100644 --- a/v4l2capture.cpp +++ b/v4l2capture.cpp @@ -1472,9 +1472,12 @@ static PyTypeObject Device_manager_type = { }; static PyMethodDef Video_out_manager_methods[] = { - {"test", (PyCFunction)Video_out_manager_open, METH_VARARGS, - "test(dev = '\\dev\\video0')\n\n" + {"open", (PyCFunction)Video_out_manager_open, METH_VARARGS, + "open(dev = '\\dev\\video0')\n\n" "Open video output."}, + {"close", (PyCFunction)Video_out_manager_close, METH_VARARGS, + "close(dev = '\\dev\\video0')\n\n" + "Close video device. Subsequent calls to other methods will fail."}, {NULL} }; diff --git a/v4l2out.cpp b/v4l2out.cpp index 2538186..3005e26 100644 --- a/v4l2out.cpp +++ b/v4l2out.cpp @@ -1,8 +1,6 @@ #include "v4l2out.h" - - class Video_out { public: @@ -26,11 +24,9 @@ class Video_out virtual ~Video_out() { - pthread_mutex_destroy(&lock); } - public: void Run() { @@ -108,8 +104,11 @@ void Video_out_manager_dealloc(Video_out_manager *self) PyObject *Video_out_manager_open(Video_out_manager *self, PyObject *args) { + Py_RETURN_NONE; +} - +PyObject *Video_out_manager_close(Video_out_manager *self, PyObject *args) +{ Py_RETURN_NONE; } diff --git a/v4l2out.h b/v4l2out.h index 4069abf..d739750 100644 --- a/v4l2out.h +++ b/v4l2out.h @@ -20,6 +20,7 @@ void Video_out_manager_dealloc(Video_out_manager *self); PyObject *Video_out_manager_open(Video_out_manager *self, PyObject *args); +PyObject *Video_out_manager_close(Video_out_manager *self, PyObject *args); // ****************************************************************** From cb9e625fa3851abae687f9a345eb8423cf2019db Mon Sep 17 00:00:00 2001 From: Tim Sheerman-Chase Date: Tue, 22 Oct 2013 21:48:18 +0100 Subject: [PATCH 057/256] Start and stop worker thread --- v4l2out.cpp | 84 ++++++++++++++++++++++++++++++++++++++++++++--------- v4l2out.h | 2 +- 2 files changed, 72 insertions(+), 14 deletions(-) diff --git a/v4l2out.cpp b/v4l2out.cpp index 3005e26..1b90285 100644 --- a/v4l2out.cpp +++ b/v4l2out.cpp @@ -15,7 +15,7 @@ class Video_out { stop = 0; stopped = 1; - verbose = 0; + verbose = 1; this->devName = devNameIn; pthread_mutex_init(&lock, NULL); @@ -27,6 +27,10 @@ class Video_out pthread_mutex_destroy(&lock); } +protected: + + + public: void Run() { @@ -40,14 +44,14 @@ class Video_out { while(running) { - //printf("Sleep\n"); - usleep(1000); + printf("Sleep\n"); + 
usleep(1000000); pthread_mutex_lock(&this->lock); try { - running = !this->stop; + running = !this->stop; } catch(std::exception &err) { @@ -66,7 +70,28 @@ class Video_out pthread_mutex_lock(&this->lock); this->stopped = 1; pthread_mutex_unlock(&this->lock); - }; + } + + void Stop() + { + pthread_mutex_lock(&this->lock); + this->stop = 1; + pthread_mutex_unlock(&this->lock); + } + + int WaitForStop() + { + this->Stop(); + while(1) + { + pthread_mutex_lock(&this->lock); + int s = this->stopped; + pthread_mutex_unlock(&this->lock); + + if(s) return 1; + usleep(10000); + } + } }; void *Video_out_manager_Worker_thread(void *arg) @@ -82,33 +107,66 @@ void *Video_out_manager_Worker_thread(void *arg) int Video_out_manager_init(Video_out_manager *self, PyObject *args, PyObject *kwargs) { - //self->threadArgStore = new std::map; + self->threads = new std::map; return 0; } void Video_out_manager_dealloc(Video_out_manager *self) { //Stop high level threads - /*for(std::map::iterator it = self->threadArgStore->begin(); - it != self->threadArgStore->end(); it++) + for(std::map::iterator it = self->threads->begin(); + it != self->threads->end(); it++) { - PyObject *args = PyTuple_New(1); - PyTuple_SetItem(args, 0, PyString_FromString(it->first.c_str())); - Device_manager_stop(self, args); - Py_DECREF(args); + + + + it->second->Stop(); + it->second->WaitForStop(); } - delete self->threadArgStore;*/ + delete self->threads; + self->threads = NULL; self->ob_type->tp_free((PyObject *)self); } PyObject *Video_out_manager_open(Video_out_manager *self, PyObject *args) { + //Process arguments + const char *devarg = "/dev/video0"; + if(PyTuple_Size(args) >= 1) + { + PyObject *pydevarg = PyTuple_GetItem(args, 0); + devarg = PyString_AsString(pydevarg); + } + + //Create worker thread + pthread_t thread; + Video_out *threadArgs = new Video_out(devarg); + (*self->threads)[devarg] = threadArgs; + threadArgs->self = self; + pthread_create(&thread, NULL, Video_out_manager_Worker_thread, threadArgs); + Py_RETURN_NONE; } PyObject *Video_out_manager_close(Video_out_manager *self, PyObject *args) { + //Process arguments + const char *devarg = "/dev/video0"; + if(PyTuple_Size(args) >= 1) + { + PyObject *pydevarg = PyTuple_GetItem(args, 0); + devarg = PyString_AsString(pydevarg); + } + + //Stop worker thread + std::map::iterator it = self->threads->find(devarg); + + if(it != self->threads->end()) + { + it->second->Stop(); + } + Py_RETURN_NONE; } diff --git a/v4l2out.h b/v4l2out.h index d739750..2097974 100644 --- a/v4l2out.h +++ b/v4l2out.h @@ -9,7 +9,7 @@ class Video_out_manager_cl{ public: PyObject_HEAD - //std::map *threadArgStore; + std::map *threads; }; typedef Video_out_manager_cl Video_out_manager; From 2ac1968242a2f6f1c356a8c0b714735869e92903 Mon Sep 17 00:00:00 2001 From: Tim Sheerman-Chase Date: Wed, 23 Oct 2013 06:13:18 +0100 Subject: [PATCH 058/256] Adding actual v4l2loopback code to worker thread --- v4l2out.cpp | 92 ++++++++++++++++++++++++++++++++++++++++++++++++++++- 1 file changed, 91 insertions(+), 1 deletion(-) diff --git a/v4l2out.cpp b/v4l2out.cpp index 1b90285..883999d 100644 --- a/v4l2out.cpp +++ b/v4l2out.cpp @@ -1,6 +1,32 @@ +#include +#include +#include + +#include +#include +#include +#include +#include #include "v4l2out.h" +#define ROUND_UP_2(num) (((num)+1)&~1) +#define ROUND_UP_4(num) (((num)+3)&~3) +#define ROUND_UP_8(num) (((num)+7)&~7) + +void print_format(struct v4l2_format*vid_format) { + printf(" vid_format->type =%d\n", vid_format->type ); + printf(" vid_format->fmt.pix.width =%d\n", 
vid_format->fmt.pix.width ); + printf(" vid_format->fmt.pix.height =%d\n", vid_format->fmt.pix.height ); + printf(" vid_format->fmt.pix.pixelformat =%d\n", vid_format->fmt.pix.pixelformat); + printf(" vid_format->fmt.pix.sizeimage =%d\n", vid_format->fmt.pix.sizeimage ); + printf(" vid_format->fmt.pix.field =%d\n", vid_format->fmt.pix.field ); + printf(" vid_format->fmt.pix.bytesperline=%d\n", vid_format->fmt.pix.bytesperline ); + printf(" vid_format->fmt.pix.colorspace =%d\n", vid_format->fmt.pix.colorspace ); +} + +//******************************************************************* + class Video_out { public: @@ -40,13 +66,77 @@ class Video_out this->stopped = 0; pthread_mutex_unlock(&this->lock); + int fdwr = open(this->devName.c_str(), O_RDWR); + assert(fdwr >= 0); + + struct v4l2_capability vid_caps; + int ret_code = ioctl(fdwr, VIDIOC_QUERYCAP, &vid_caps); + assert(ret_code != -1); + + struct v4l2_format vid_format; + memset(&vid_format, 0, sizeof(vid_format)); + + printf("a %d\n", vid_format.fmt.pix.sizeimage); + + ret_code = ioctl(fdwr, VIDIOC_G_FMT, &vid_format); + if(verbose)print_format(&vid_format); + + #define FRAME_WIDTH 640 + #define FRAME_HEIGHT 480 + #define FRAME_FORMAT V4L2_PIX_FMT_YVU420 + int lw = FRAME_WIDTH; /* ??? */ + int fw = ROUND_UP_4 (FRAME_WIDTH) * ROUND_UP_2 (FRAME_HEIGHT); + fw += 2 * ((ROUND_UP_8 (FRAME_WIDTH) / 2) * (ROUND_UP_2 (FRAME_HEIGHT) / 2)); + + vid_format.type = V4L2_BUF_TYPE_VIDEO_OUTPUT; + vid_format.fmt.pix.width = FRAME_WIDTH; + vid_format.fmt.pix.height = FRAME_HEIGHT; + vid_format.fmt.pix.pixelformat = FRAME_FORMAT; + vid_format.fmt.pix.sizeimage = lw; + //printf("test %d\n", vid_format.fmt.pix.sizeimage); + vid_format.fmt.pix.field = V4L2_FIELD_NONE; + vid_format.fmt.pix.bytesperline = fw; + //printf("test2 %d\n", vid_format.fmt.pix.bytesperline); + vid_format.fmt.pix.colorspace = V4L2_COLORSPACE_SRGB; + + printf("b %d\n", vid_format.fmt.pix.sizeimage); + + if(verbose)print_format(&vid_format); + + printf("b2 %d\n", vid_format.fmt.pix.sizeimage); + + ret_code = ioctl(fdwr, VIDIOC_S_FMT, &vid_format); + + printf("c %d\n", vid_format.fmt.pix.sizeimage); + + assert(ret_code != -1); + + int framesize = vid_format.fmt.pix.sizeimage; + int linewidth = vid_format.fmt.pix.bytesperline; + if(verbose)printf("frame: format=%d\tsize=%d\n", FRAME_FORMAT, framesize); + printf("d %d\n", vid_format.fmt.pix.sizeimage); + print_format(&vid_format); + + printf("test %d\n", framesize); + printf("e %d\n", vid_format.fmt.pix.sizeimage); + + printf("testa %d\n", framesize); + printf("f %d\n", vid_format.fmt.pix.sizeimage); + + __u8* buffer=(__u8*)malloc(sizeof(__u8)*framesize); + memset(buffer, 0, framesize); + + printf("testb %d\n", framesize); + try { while(running) { - printf("Sleep\n"); usleep(1000000); + printf("Write frame\n"); + write(fdwr, buffer, framesize); + pthread_mutex_lock(&this->lock); try { From 24eb4bef06d074fb596ef0e0375793043cc16659 Mon Sep 17 00:00:00 2001 From: Tim Sheerman-Chase Date: Wed, 23 Oct 2013 06:33:10 +0100 Subject: [PATCH 059/256] Move pixel format conversion to separate file --- pixfmt.cpp | 437 ++++++++++++++++++++++++++++++++++++++++++++++++ pixfmt.h | 17 ++ setup.py | 2 +- v4l2capture.cpp | 429 +---------------------------------------------- 4 files changed, 456 insertions(+), 429 deletions(-) create mode 100644 pixfmt.cpp create mode 100644 pixfmt.h diff --git a/pixfmt.cpp b/pixfmt.cpp new file mode 100644 index 0000000..b5dc717 --- /dev/null +++ b/pixfmt.cpp @@ -0,0 +1,437 @@ + +#include +#include +#include +#include 
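// Editor's aside: the code introduced in this file exists because many UVC
// webcams emit MJPEG frames with the JPEG DHT (Huffman table) segment omitted;
// a standard table has to be spliced in ahead of the start-of-scan marker
// before libjpeg can decode the frame. That is what huffmanSegment and
// InsertHuffmanTableCTypes() below implement. A minimal usage sketch, with
// hypothetical buffer names that are not part of this patch:
//
//     std::string jpegBin;
//     if(InsertHuffmanTableCTypes(mjpegFrame, mjpegFrameLen, jpegBin))
//     {
//         // jpegBin now holds a self-contained JPEG that libjpeg can decode
//     }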
+#include +#include +#include "pixfmt.h" + +// ********************************************************************* + +#define HUFFMAN_SEGMENT_LEN 420 + +const char huffmanSegment[HUFFMAN_SEGMENT_LEN+1] = + "\xFF\xC4\x01\xA2\x00\x00\x01\x05\x01\x01\x01\x01" + "\x01\x01\x00\x00\x00\x00\x00\x00\x00\x00\x01\x02" + "\x03\x04\x05\x06\x07\x08\x09\x0A\x0B\x01\x00\x03" + "\x01\x01\x01\x01\x01\x01\x01\x01\x01\x00\x00\x00" + "\x00\x00\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09" + "\x0A\x0B\x10\x00\x02\x01\x03\x03\x02\x04\x03\x05" + "\x05\x04\x04\x00\x00\x01\x7D\x01\x02\x03\x00\x04" + "\x11\x05\x12\x21\x31\x41\x06\x13\x51\x61\x07\x22" + "\x71\x14\x32\x81\x91\xA1\x08\x23\x42\xB1\xC1\x15" + "\x52\xD1\xF0\x24\x33\x62\x72\x82\x09\x0A\x16\x17" + "\x18\x19\x1A\x25\x26\x27\x28\x29\x2A\x34\x35\x36" + "\x37\x38\x39\x3A\x43\x44\x45\x46\x47\x48\x49\x4A" + "\x53\x54\x55\x56\x57\x58\x59\x5A\x63\x64\x65\x66" + "\x67\x68\x69\x6A\x73\x74\x75\x76\x77\x78\x79\x7A" + "\x83\x84\x85\x86\x87\x88\x89\x8A\x92\x93\x94\x95" + "\x96\x97\x98\x99\x9A\xA2\xA3\xA4\xA5\xA6\xA7\xA8" + "\xA9\xAA\xB2\xB3\xB4\xB5\xB6\xB7\xB8\xB9\xBA\xC2" + "\xC3\xC4\xC5\xC6\xC7\xC8\xC9\xCA\xD2\xD3\xD4\xD5" + "\xD6\xD7\xD8\xD9\xDA\xE1\xE2\xE3\xE4\xE5\xE6\xE7" + "\xE8\xE9\xEA\xF1\xF2\xF3\xF4\xF5\xF6\xF7\xF8\xF9" + "\xFA\x11\x00\x02\x01\x02\x04\x04\x03\x04\x07\x05" + "\x04\x04\x00\x01\x02\x77\x00\x01\x02\x03\x11\x04" + "\x05\x21\x31\x06\x12\x41\x51\x07\x61\x71\x13\x22" + "\x32\x81\x08\x14\x42\x91\xA1\xB1\xC1\x09\x23\x33" + "\x52\xF0\x15\x62\x72\xD1\x0A\x16\x24\x34\xE1\x25" + "\xF1\x17\x18\x19\x1A\x26\x27\x28\x29\x2A\x35\x36" + "\x37\x38\x39\x3A\x43\x44\x45\x46\x47\x48\x49\x4A" + "\x53\x54\x55\x56\x57\x58\x59\x5A\x63\x64\x65\x66" + "\x67\x68\x69\x6A\x73\x74\x75\x76\x77\x78\x79\x7A" + "\x82\x83\x84\x85\x86\x87\x88\x89\x8A\x92\x93\x94" + "\x95\x96\x97\x98\x99\x9A\xA2\xA3\xA4\xA5\xA6\xA7" + "\xA8\xA9\xAA\xB2\xB3\xB4\xB5\xB6\xB7\xB8\xB9\xBA" + "\xC2\xC3\xC4\xC5\xC6\xC7\xC8\xC9\xCA\xD2\xD3\xD4" + "\xD5\xD6\xD7\xD8\xD9\xDA\xE2\xE3\xE4\xE5\xE6\xE7" + "\xE8\xE9\xEA\xF2\xF3\xF4\xF5\xF6\xF7\xF8\xF9\xFA"; + +int ReadJpegFrame(const unsigned char *data, unsigned offset, const unsigned char **twoBytesOut, unsigned *frameStartPosOut, unsigned *cursorOut) +{ + //Based on http://www.gdcl.co.uk/2013/05/02/Motion-JPEG.html + //and https://en.wikipedia.org/wiki/JPEG + + *twoBytesOut = NULL; + *frameStartPosOut = 0; + *cursorOut = 0; + unsigned cursor = offset; + //Check frame start + unsigned frameStartPos = offset; + const unsigned char *twoBytes = &data[cursor]; + + if (twoBytes[0] != 0xff) + { + //print "Error: found header", map(hex,twoBytes),"at position",cursor + return 0; + } + + cursor = 2 + cursor; + + //Handle padding + int paddingByte = (twoBytes[0] == 0xff && twoBytes[1] == 0xff); + if(paddingByte) + { + *twoBytesOut = twoBytes; + *frameStartPosOut = frameStartPos; + *cursorOut = cursor; + return 1; + } + + //Structure markers with 2 byte length + int markHeader = (twoBytes[0] == 0xff && twoBytes[1] >= 0xd0 && twoBytes[1] <= 0xd9); + if (markHeader) + { + *twoBytesOut = twoBytes; + *frameStartPosOut = frameStartPos; + *cursorOut = cursor; + return 1; + } + + //Determine length of compressed (entropy) data + int compressedDataStart = (twoBytes[0] == 0xff && twoBytes[1] == 0xda); + if (compressedDataStart) + { + unsigned sosLength = ((data[cursor] << 8) + data[cursor+1]); + cursor += sosLength; + + //Seek through frame + int run = 1; + while(run) + { + unsigned char byte = data[cursor]; + cursor += 1; + + if(byte == 0xff) + { + unsigned char byte2 = data[cursor]; + 
cursor += 1; + if(byte2 != 0x00) + { + if(byte2 >= 0xd0 && byte2 <= 0xd8) + { + //Found restart structure + //print hex(byte), hex(byte2) + } + else + { + //End of frame + run = 0; + cursor -= 2; + } + } + else + { + //Add escaped 0xff value in entropy data + } + } + else + { + + } + } + + *twoBytesOut = twoBytes; + *frameStartPosOut = frameStartPos; + *cursorOut = cursor; + return 1; + } + + //More cursor for all other segment types + unsigned segLength = (data[cursor] << 8) + data[cursor+1]; + cursor += segLength; + *twoBytesOut = twoBytes; + *frameStartPosOut = frameStartPos; + *cursorOut = cursor; + return 1; +} + +int InsertHuffmanTableCTypes(const unsigned char* inBufferPtr, unsigned inBufferLen, std::string &outBuffer) +{ + int parsing = 1; + unsigned frameStartPos = 0; + int huffFound = 0; + + outBuffer.clear(); + + while(parsing) + { + //Check if we should stop + if (frameStartPos >= inBufferLen) + { + parsing = 0; + continue; + } + + //Read the next segment + const unsigned char *twoBytes = NULL; + unsigned frameEndPos=0; + + int ok = ReadJpegFrame(inBufferPtr, frameStartPos, &twoBytes, &frameStartPos, &frameEndPos); + + //if(verbose) + // print map(hex, twoBytes), frameStartPos, frameEndPos; + + //Stop if there is a serious error + if(!ok) + { + return 0; + } + + //Check if this segment is the compressed data + if(twoBytes[0] == 0xff && twoBytes[1] == 0xda && !huffFound) + { + outBuffer.append(huffmanSegment, HUFFMAN_SEGMENT_LEN); + } + + //Check the type of frame + if(twoBytes[0] == 0xff && twoBytes[1] == 0xc4) + huffFound = 1; + + //Write current structure to output + outBuffer.append((char *)&inBufferPtr[frameStartPos], frameEndPos - frameStartPos); + + //Move cursor + frameStartPos = frameEndPos; + } + return 1; +} + +// ********************************************************************* + +struct my_error_mgr +{ + struct jpeg_error_mgr pub; /* "public" fields */ + + jmp_buf setjmp_buffer; /* for return to caller */ +}; + +int ReadJpegFile(unsigned char * inbuffer, + unsigned long insize, + unsigned char **outBuffer, + unsigned *outBufferSize, + int *widthOut, int *heightOut, int *channelsOut) +{ + /* This struct contains the JPEG decompression parameters and pointers to + * working space (which is allocated as needed by the JPEG library). + */ + struct jpeg_decompress_struct cinfo; + struct my_error_mgr jerr; + *outBuffer = NULL; + *outBufferSize = 0; + *widthOut = 0; + *heightOut = 0; + *channelsOut = 0; + + /* More stuff */ + JSAMPARRAY buffer; /* Output row buffer */ + int row_stride; /* physical row width in output buffer */ + + /* Step 1: initialize the JPEG decompression object. 
*/ + cinfo.err = jpeg_std_error(&jerr.pub); + jpeg_create_decompress(&cinfo); + + /* Step 2: specify data source */ + jpeg_mem_src(&cinfo, inbuffer, insize); + + /* Step 3: read file parameters with jpeg_read_header() */ + jpeg_read_header(&cinfo, TRUE); + + *outBufferSize = cinfo.image_width * cinfo.image_height * cinfo.num_components; + *outBuffer = new unsigned char[*outBufferSize]; + *widthOut = cinfo.image_width; + *heightOut = cinfo.image_height; + *channelsOut = cinfo.num_components; + + /* Step 4: set parameters for decompression */ + //Optional + + /* Step 5: Start decompressor */ + jpeg_start_decompress(&cinfo); + /* JSAMPLEs per row in output buffer */ + row_stride = cinfo.output_width * cinfo.output_components; + /* Make a one-row-high sample array that will go away when done with image */ + buffer = (*cinfo.mem->alloc_sarray) + ((j_common_ptr) &cinfo, JPOOL_IMAGE, row_stride, 1); + + /* Step 6: while (scan lines remain to be read) */ + /* jpeg_read_scanlines(...); */ + + /* Here we use the library's state variable cinfo.output_scanline as the + * loop counter, so that we don't have to keep track ourselves. + */ + while (cinfo.output_scanline < cinfo.output_height) { + /* jpeg_read_scanlines expects an array of pointers to scanlines. + * Here the array is only one element long, but you could ask for + * more than one scanline at a time if that's more convenient. + */ + jpeg_read_scanlines(&cinfo, buffer, 1); + /* Assume put_scanline_someplace wants a pointer and sample count. */ + //put_scanline_someplace(buffer[0], row_stride); + assert(row_stride = cinfo.image_width * cinfo.num_components); + //printf("%ld\n", (long)buffer); + //printf("%ld\n", (long)buffer[0]); + //printf("%d %d\n", (cinfo.output_scanline-1) * row_stride, *outBufferSize); + //printf("%ld %ld\n", (long)outBuffer, (long)&outBuffer[(cinfo.output_scanline-1) * row_stride]); + memcpy(&(*outBuffer)[(cinfo.output_scanline-1) * row_stride], buffer[0], row_stride); + } + + /* Step 7: Finish decompression */ + jpeg_finish_decompress(&cinfo); + + /* Step 8: Release JPEG decompression object */ + + /* This is an important step since it will release a good deal of memory. */ + jpeg_destroy_decompress(&cinfo); + + /* At this point you may want to check to see whether any corrupt-data + * warnings occurred (test whether jerr.pub.num_warnings is nonzero). 
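 *
 * Editor's note: jpeg_mem_src() used in Step 2 is only provided by libjpeg v8
 * and later, or by libjpeg-turbo; plain libjpeg 6b builds do not have it. Note
 * also that setjmp_buffer in my_error_mgr above is declared but no custom
 * error_exit handler is installed here, so libjpeg's default error handler is
 * used if the stream is corrupt.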
+ */ + + return 1; +} + +// ********************************************************************* + +int DecodeFrame(const unsigned char *data, unsigned dataLen, + const char *inPxFmt, + int width, int height, + const char *targetPxFmt, + unsigned char **buffOut, + unsigned *buffOutLen) +{ + //printf("rx %d %s\n", dataLen, inPxFmt); + *buffOut = NULL; + *buffOutLen = 0; + + if(strcmp(inPxFmt, targetPxFmt) == 0) + { + //Conversion not required, return a shallow copy + *buffOutLen = dataLen; + *buffOut = new unsigned char[dataLen]; + memcpy(*buffOut, data, dataLen); + return 1; + } + + if(strcmp(inPxFmt,"MJPEG")==0 && strcmp(targetPxFmt, "RGB24")==0) + { + std::string jpegBin; + InsertHuffmanTableCTypes(data, dataLen, jpegBin); + + unsigned char *decodedBuff = NULL; + unsigned decodedBuffSize = 0; + int widthActual = 0, heightActual = 0, channelsActual = 0; + + ReadJpegFile((unsigned char*)jpegBin.c_str(), jpegBin.length(), + &decodedBuff, + &decodedBuffSize, + &widthActual, &heightActual, &channelsActual); + + if(widthActual == width && heightActual == height) + { + assert(channelsActual == 3); + *buffOut = decodedBuff; + *buffOutLen = decodedBuffSize; + } + else + { + delete [] decodedBuff; + throw std::runtime_error("Decoded jpeg has unexpected size"); + } + return 1; + } + + if(strcmp(inPxFmt,"YUYV")==0 && strcmp(targetPxFmt, "RGB24")==0) + { + // Convert buffer from YUYV to RGB. + // For the byte order, see: http://v4l2spec.bytesex.org/spec/r4339.htm + // For the color conversion, see: http://v4l2spec.bytesex.org/spec/x2123.htm + *buffOutLen = dataLen * 6 / 4; + char *rgb = new char[*buffOutLen]; + *buffOut = (unsigned char*)rgb; + + char *rgb_max = rgb + *buffOutLen; + const unsigned char *yuyv = data; + + #define CLAMP(c) ((c) <= 0 ? 0 : (c) >= 65025 ? 
255 : (c) >> 8) + while(rgb < rgb_max) + { + int u = yuyv[1] - 128; + int v = yuyv[3] - 128; + int uv = 100 * u + 208 * v; + u *= 516; + v *= 409; + + int y = 298 * (yuyv[0] - 16); + rgb[0] = CLAMP(y + v); + rgb[1] = CLAMP(y - uv); + rgb[2] = CLAMP(y + u); + + y = 298 * (yuyv[2] - 16); + rgb[3] = CLAMP(y + v); + rgb[4] = CLAMP(y - uv); + rgb[5] = CLAMP(y + u); + + rgb += 6; + yuyv += 4; + } + #undef CLAMP + return 1; + } + + /* + //Untested code + if((strcmp(inPxFmt,"YUV2")==0 || strcmp(inPxFmt,"YVU2")==0) + && strcmp(targetPxFmt, "RGB24")==0) + { + int uoff = 1; + int voff = 3; + if(strcmp(inPxFmt,"YUV2")==0) + { + uoff = 1; + voff = 3; + } + if(strcmp(inPxFmt,"YVU2")==0) + { + uoff = 3; + voff = 1; + } + + int stride = width * 4; + int hwidth = width/2; + for(int lineNum=0; lineNum < height; lineNum++) + { + int lineOffset = lineNum * stride; + int outOffset = lineNum * width * 3; + + for(int pxPairNum=0; pxPairNum < hwidth; pxPairNum++) + { + unsigned char Y1 = data[pxPairNum * 4 + lineOffset]; + unsigned char Cb = data[pxPairNum * 4 + lineOffset + uoff]; + unsigned char Y2 = data[pxPairNum * 4 + lineOffset + 2]; + unsigned char Cr = data[pxPairNum * 4 + lineOffset + voff]; + + //ITU-R BT.601 colour conversion + double R1 = (Y1 + 1.402 * (Cr - 128)); + double G1 = (Y1 - 0.344 * (Cb - 128) - 0.714 * (Cr - 128)); + double B1 = (Y1 + 1.772 * (Cb - 128)); + double R2 = (Y2 + 1.402 * (Cr - 128)); + double G2 = (Y2 - 0.344 * (Cb - 128) - 0.714 * (Cr - 128)); + double B2 = (Y2 + 1.772 * (Cb - 128)); + + (*buffOut)[outOffset + pxPairNum * 6] = R1; + (*buffOut)[outOffset + pxPairNum * 6 + 1] = G1; + (*buffOut)[outOffset + pxPairNum * 6 + 2] = B1; + (*buffOut)[outOffset + pxPairNum * 6 + 3] = R2; + (*buffOut)[outOffset + pxPairNum * 6 + 4] = G2; + (*buffOut)[outOffset + pxPairNum * 6 + 5] = B2; + } + } + } + */ + + return 0; +} + + diff --git a/pixfmt.h b/pixfmt.h new file mode 100644 index 0000000..f134f96 --- /dev/null +++ b/pixfmt.h @@ -0,0 +1,17 @@ + +#ifndef _PIXFMT_H_ +#define _PIXFMT_H_ + +#include + +int DecodeFrame(const unsigned char *data, unsigned dataLen, + const char *inPxFmt, + int width, int height, + const char *targetPxFmt, + unsigned char **buffOut, + unsigned *buffOutLen); + +int InsertHuffmanTableCTypes(const unsigned char* inBufferPtr, unsigned inBufferLen, std::string &outBuffer); + +#endif //_PIXFMT_H_ + diff --git a/setup.py b/setup.py index 57fb656..c21a84a 100755 --- a/setup.py +++ b/setup.py @@ -25,5 +25,5 @@ "License :: Public Domain", "Programming Language :: C++"], ext_modules = [ - Extension("v4l2capture", ["v4l2capture.cpp", "v4l2out.cpp"], libraries = ["v4l2", "pthread", "jpeg"])]) + Extension("v4l2capture", ["v4l2capture.cpp", "v4l2out.cpp", "pixfmt.cpp"], libraries = ["v4l2", "pthread", "jpeg"])]) diff --git a/v4l2capture.cpp b/v4l2capture.cpp index 6049b21..3ea2786 100644 --- a/v4l2capture.cpp +++ b/v4l2capture.cpp @@ -19,9 +19,9 @@ #include #include #include -#include #include #include "v4l2out.h" +#include "pixfmt.h" #ifdef USE_LIBV4L #include @@ -134,197 +134,6 @@ static int my_ioctl(int fd, int request, void *arg, int utimeout = -1) } } -// ********************************************************************* - -#define HUFFMAN_SEGMENT_LEN 420 - -const char huffmanSegment[HUFFMAN_SEGMENT_LEN+1] = - "\xFF\xC4\x01\xA2\x00\x00\x01\x05\x01\x01\x01\x01" - "\x01\x01\x00\x00\x00\x00\x00\x00\x00\x00\x01\x02" - "\x03\x04\x05\x06\x07\x08\x09\x0A\x0B\x01\x00\x03" - "\x01\x01\x01\x01\x01\x01\x01\x01\x01\x00\x00\x00" - "\x00\x00\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09" 
- "\x0A\x0B\x10\x00\x02\x01\x03\x03\x02\x04\x03\x05" - "\x05\x04\x04\x00\x00\x01\x7D\x01\x02\x03\x00\x04" - "\x11\x05\x12\x21\x31\x41\x06\x13\x51\x61\x07\x22" - "\x71\x14\x32\x81\x91\xA1\x08\x23\x42\xB1\xC1\x15" - "\x52\xD1\xF0\x24\x33\x62\x72\x82\x09\x0A\x16\x17" - "\x18\x19\x1A\x25\x26\x27\x28\x29\x2A\x34\x35\x36" - "\x37\x38\x39\x3A\x43\x44\x45\x46\x47\x48\x49\x4A" - "\x53\x54\x55\x56\x57\x58\x59\x5A\x63\x64\x65\x66" - "\x67\x68\x69\x6A\x73\x74\x75\x76\x77\x78\x79\x7A" - "\x83\x84\x85\x86\x87\x88\x89\x8A\x92\x93\x94\x95" - "\x96\x97\x98\x99\x9A\xA2\xA3\xA4\xA5\xA6\xA7\xA8" - "\xA9\xAA\xB2\xB3\xB4\xB5\xB6\xB7\xB8\xB9\xBA\xC2" - "\xC3\xC4\xC5\xC6\xC7\xC8\xC9\xCA\xD2\xD3\xD4\xD5" - "\xD6\xD7\xD8\xD9\xDA\xE1\xE2\xE3\xE4\xE5\xE6\xE7" - "\xE8\xE9\xEA\xF1\xF2\xF3\xF4\xF5\xF6\xF7\xF8\xF9" - "\xFA\x11\x00\x02\x01\x02\x04\x04\x03\x04\x07\x05" - "\x04\x04\x00\x01\x02\x77\x00\x01\x02\x03\x11\x04" - "\x05\x21\x31\x06\x12\x41\x51\x07\x61\x71\x13\x22" - "\x32\x81\x08\x14\x42\x91\xA1\xB1\xC1\x09\x23\x33" - "\x52\xF0\x15\x62\x72\xD1\x0A\x16\x24\x34\xE1\x25" - "\xF1\x17\x18\x19\x1A\x26\x27\x28\x29\x2A\x35\x36" - "\x37\x38\x39\x3A\x43\x44\x45\x46\x47\x48\x49\x4A" - "\x53\x54\x55\x56\x57\x58\x59\x5A\x63\x64\x65\x66" - "\x67\x68\x69\x6A\x73\x74\x75\x76\x77\x78\x79\x7A" - "\x82\x83\x84\x85\x86\x87\x88\x89\x8A\x92\x93\x94" - "\x95\x96\x97\x98\x99\x9A\xA2\xA3\xA4\xA5\xA6\xA7" - "\xA8\xA9\xAA\xB2\xB3\xB4\xB5\xB6\xB7\xB8\xB9\xBA" - "\xC2\xC3\xC4\xC5\xC6\xC7\xC8\xC9\xCA\xD2\xD3\xD4" - "\xD5\xD6\xD7\xD8\xD9\xDA\xE2\xE3\xE4\xE5\xE6\xE7" - "\xE8\xE9\xEA\xF2\xF3\xF4\xF5\xF6\xF7\xF8\xF9\xFA"; - -int ReadJpegFrame(const unsigned char *data, unsigned offset, const unsigned char **twoBytesOut, unsigned *frameStartPosOut, unsigned *cursorOut) -{ - //Based on http://www.gdcl.co.uk/2013/05/02/Motion-JPEG.html - //and https://en.wikipedia.org/wiki/JPEG - - *twoBytesOut = NULL; - *frameStartPosOut = 0; - *cursorOut = 0; - unsigned cursor = offset; - //Check frame start - unsigned frameStartPos = offset; - const unsigned char *twoBytes = &data[cursor]; - - if (twoBytes[0] != 0xff) - { - //print "Error: found header", map(hex,twoBytes),"at position",cursor - return 0; - } - - cursor = 2 + cursor; - - //Handle padding - int paddingByte = (twoBytes[0] == 0xff && twoBytes[1] == 0xff); - if(paddingByte) - { - *twoBytesOut = twoBytes; - *frameStartPosOut = frameStartPos; - *cursorOut = cursor; - return 1; - } - - //Structure markers with 2 byte length - int markHeader = (twoBytes[0] == 0xff && twoBytes[1] >= 0xd0 && twoBytes[1] <= 0xd9); - if (markHeader) - { - *twoBytesOut = twoBytes; - *frameStartPosOut = frameStartPos; - *cursorOut = cursor; - return 1; - } - - //Determine length of compressed (entropy) data - int compressedDataStart = (twoBytes[0] == 0xff && twoBytes[1] == 0xda); - if (compressedDataStart) - { - unsigned sosLength = ((data[cursor] << 8) + data[cursor+1]); - cursor += sosLength; - - //Seek through frame - int run = 1; - while(run) - { - unsigned char byte = data[cursor]; - cursor += 1; - - if(byte == 0xff) - { - unsigned char byte2 = data[cursor]; - cursor += 1; - if(byte2 != 0x00) - { - if(byte2 >= 0xd0 && byte2 <= 0xd8) - { - //Found restart structure - //print hex(byte), hex(byte2) - } - else - { - //End of frame - run = 0; - cursor -= 2; - } - } - else - { - //Add escaped 0xff value in entropy data - } - } - else - { - - } - } - - *twoBytesOut = twoBytes; - *frameStartPosOut = frameStartPos; - *cursorOut = cursor; - return 1; - } - - //More cursor for all other segment types - unsigned segLength = 
(data[cursor] << 8) + data[cursor+1]; - cursor += segLength; - *twoBytesOut = twoBytes; - *frameStartPosOut = frameStartPos; - *cursorOut = cursor; - return 1; -} - -int InsertHuffmanTableCTypes(const unsigned char* inBufferPtr, unsigned inBufferLen, std::string &outBuffer) -{ - int parsing = 1; - unsigned frameStartPos = 0; - int huffFound = 0; - - outBuffer.clear(); - - while(parsing) - { - //Check if we should stop - if (frameStartPos >= inBufferLen) - { - parsing = 0; - continue; - } - - //Read the next segment - const unsigned char *twoBytes = NULL; - unsigned frameEndPos=0; - - int ok = ReadJpegFrame(inBufferPtr, frameStartPos, &twoBytes, &frameStartPos, &frameEndPos); - - //if(verbose) - // print map(hex, twoBytes), frameStartPos, frameEndPos; - - //Stop if there is a serious error - if(!ok) - { - return 0; - } - - //Check if this segment is the compressed data - if(twoBytes[0] == 0xff && twoBytes[1] == 0xda && !huffFound) - { - outBuffer.append(huffmanSegment, HUFFMAN_SEGMENT_LEN); - } - - //Check the type of frame - if(twoBytes[0] == 0xff && twoBytes[1] == 0xc4) - huffFound = 1; - - //Write current structure to output - outBuffer.append((char *)&inBufferPtr[frameStartPos], frameEndPos - frameStartPos); - - //Move cursor - frameStartPos = frameEndPos; - } - return 1; -} - static PyObject *InsertHuffmanTable(PyObject *self, PyObject *args) { /* This converts an MJPEG frame into a standard JPEG binary @@ -363,242 +172,6 @@ static PyObject *InsertHuffmanTable(PyObject *self, PyObject *args) return outBufferPy; } -// ********************************************************************* - -struct my_error_mgr -{ - struct jpeg_error_mgr pub; /* "public" fields */ - - jmp_buf setjmp_buffer; /* for return to caller */ -}; - -int ReadJpegFile(unsigned char * inbuffer, - unsigned long insize, - unsigned char **outBuffer, - unsigned *outBufferSize, - int *widthOut, int *heightOut, int *channelsOut) -{ - /* This struct contains the JPEG decompression parameters and pointers to - * working space (which is allocated as needed by the JPEG library). - */ - struct jpeg_decompress_struct cinfo; - struct my_error_mgr jerr; - *outBuffer = NULL; - *outBufferSize = 0; - *widthOut = 0; - *heightOut = 0; - *channelsOut = 0; - - /* More stuff */ - JSAMPARRAY buffer; /* Output row buffer */ - int row_stride; /* physical row width in output buffer */ - - /* Step 1: initialize the JPEG decompression object. 
*/ - cinfo.err = jpeg_std_error(&jerr.pub); - jpeg_create_decompress(&cinfo); - - /* Step 2: specify data source */ - jpeg_mem_src(&cinfo, inbuffer, insize); - - /* Step 3: read file parameters with jpeg_read_header() */ - jpeg_read_header(&cinfo, TRUE); - - *outBufferSize = cinfo.image_width * cinfo.image_height * cinfo.num_components; - *outBuffer = new unsigned char[*outBufferSize]; - *widthOut = cinfo.image_width; - *heightOut = cinfo.image_height; - *channelsOut = cinfo.num_components; - - /* Step 4: set parameters for decompression */ - //Optional - - /* Step 5: Start decompressor */ - jpeg_start_decompress(&cinfo); - /* JSAMPLEs per row in output buffer */ - row_stride = cinfo.output_width * cinfo.output_components; - /* Make a one-row-high sample array that will go away when done with image */ - buffer = (*cinfo.mem->alloc_sarray) - ((j_common_ptr) &cinfo, JPOOL_IMAGE, row_stride, 1); - - /* Step 6: while (scan lines remain to be read) */ - /* jpeg_read_scanlines(...); */ - - /* Here we use the library's state variable cinfo.output_scanline as the - * loop counter, so that we don't have to keep track ourselves. - */ - while (cinfo.output_scanline < cinfo.output_height) { - /* jpeg_read_scanlines expects an array of pointers to scanlines. - * Here the array is only one element long, but you could ask for - * more than one scanline at a time if that's more convenient. - */ - jpeg_read_scanlines(&cinfo, buffer, 1); - /* Assume put_scanline_someplace wants a pointer and sample count. */ - //put_scanline_someplace(buffer[0], row_stride); - assert(row_stride = cinfo.image_width * cinfo.num_components); - //printf("%ld\n", (long)buffer); - //printf("%ld\n", (long)buffer[0]); - //printf("%d %d\n", (cinfo.output_scanline-1) * row_stride, *outBufferSize); - //printf("%ld %ld\n", (long)outBuffer, (long)&outBuffer[(cinfo.output_scanline-1) * row_stride]); - memcpy(&(*outBuffer)[(cinfo.output_scanline-1) * row_stride], buffer[0], row_stride); - } - - /* Step 7: Finish decompression */ - jpeg_finish_decompress(&cinfo); - - /* Step 8: Release JPEG decompression object */ - - /* This is an important step since it will release a good deal of memory. */ - jpeg_destroy_decompress(&cinfo); - - /* At this point you may want to check to see whether any corrupt-data - * warnings occurred (test whether jerr.pub.num_warnings is nonzero). 
- */ - - return 1; -} - -// ********************************************************************* - -int DecodeFrame(const unsigned char *data, unsigned dataLen, - const char *inPxFmt, - int width, int height, - const char *targetPxFmt, - unsigned char **buffOut, - unsigned *buffOutLen) -{ - //printf("rx %d %s\n", dataLen, inPxFmt); - *buffOut = NULL; - *buffOutLen = 0; - - if(strcmp(inPxFmt, targetPxFmt) == 0) - { - //Conversion not required, return a shallow copy - *buffOutLen = dataLen; - *buffOut = new unsigned char[dataLen]; - memcpy(*buffOut, data, dataLen); - return 1; - } - - if(strcmp(inPxFmt,"MJPEG")==0 && strcmp(targetPxFmt, "RGB24")==0) - { - std::string jpegBin; - InsertHuffmanTableCTypes(data, dataLen, jpegBin); - - unsigned char *decodedBuff = NULL; - unsigned decodedBuffSize = 0; - int widthActual = 0, heightActual = 0, channelsActual = 0; - - ReadJpegFile((unsigned char*)jpegBin.c_str(), jpegBin.length(), - &decodedBuff, - &decodedBuffSize, - &widthActual, &heightActual, &channelsActual); - - if(widthActual == width && heightActual == height) - { - assert(channelsActual == 3); - *buffOut = decodedBuff; - *buffOutLen = decodedBuffSize; - } - else - { - delete [] decodedBuff; - throw std::runtime_error("Decoded jpeg has unexpected size"); - } - return 1; - } - - if(strcmp(inPxFmt,"YUYV")==0 && strcmp(targetPxFmt, "RGB24")==0) - { - // Convert buffer from YUYV to RGB. - // For the byte order, see: http://v4l2spec.bytesex.org/spec/r4339.htm - // For the color conversion, see: http://v4l2spec.bytesex.org/spec/x2123.htm - *buffOutLen = dataLen * 6 / 4; - char *rgb = new char[*buffOutLen]; - *buffOut = (unsigned char*)rgb; - - char *rgb_max = rgb + *buffOutLen; - const unsigned char *yuyv = data; - - #define CLAMP(c) ((c) <= 0 ? 0 : (c) >= 65025 ? 
255 : (c) >> 8) - while(rgb < rgb_max) - { - int u = yuyv[1] - 128; - int v = yuyv[3] - 128; - int uv = 100 * u + 208 * v; - u *= 516; - v *= 409; - - int y = 298 * (yuyv[0] - 16); - rgb[0] = CLAMP(y + v); - rgb[1] = CLAMP(y - uv); - rgb[2] = CLAMP(y + u); - - y = 298 * (yuyv[2] - 16); - rgb[3] = CLAMP(y + v); - rgb[4] = CLAMP(y - uv); - rgb[5] = CLAMP(y + u); - - rgb += 6; - yuyv += 4; - } - #undef CLAMP - return 1; - } - - /* - //Untested code - if((strcmp(inPxFmt,"YUV2")==0 || strcmp(inPxFmt,"YVU2")==0) - && strcmp(targetPxFmt, "RGB24")==0) - { - int uoff = 1; - int voff = 3; - if(strcmp(inPxFmt,"YUV2")==0) - { - uoff = 1; - voff = 3; - } - if(strcmp(inPxFmt,"YVU2")==0) - { - uoff = 3; - voff = 1; - } - - int stride = width * 4; - int hwidth = width/2; - for(int lineNum=0; lineNum < height; lineNum++) - { - int lineOffset = lineNum * stride; - int outOffset = lineNum * width * 3; - - for(int pxPairNum=0; pxPairNum < hwidth; pxPairNum++) - { - unsigned char Y1 = data[pxPairNum * 4 + lineOffset]; - unsigned char Cb = data[pxPairNum * 4 + lineOffset + uoff]; - unsigned char Y2 = data[pxPairNum * 4 + lineOffset + 2]; - unsigned char Cr = data[pxPairNum * 4 + lineOffset + voff]; - - //ITU-R BT.601 colour conversion - double R1 = (Y1 + 1.402 * (Cr - 128)); - double G1 = (Y1 - 0.344 * (Cb - 128) - 0.714 * (Cr - 128)); - double B1 = (Y1 + 1.772 * (Cb - 128)); - double R2 = (Y2 + 1.402 * (Cr - 128)); - double G2 = (Y2 - 0.344 * (Cb - 128) - 0.714 * (Cr - 128)); - double B2 = (Y2 + 1.772 * (Cb - 128)); - - (*buffOut)[outOffset + pxPairNum * 6] = R1; - (*buffOut)[outOffset + pxPairNum * 6 + 1] = G1; - (*buffOut)[outOffset + pxPairNum * 6 + 2] = B1; - (*buffOut)[outOffset + pxPairNum * 6 + 3] = R2; - (*buffOut)[outOffset + pxPairNum * 6 + 4] = G2; - (*buffOut)[outOffset + pxPairNum * 6 + 5] = B2; - } - } - } - */ - - return 0; -} - // ********************************************************************** From ca21046ee2654bbbf7a3dcf262f3c4512f974dad Mon Sep 17 00:00:00 2001 From: Tim Sheerman-Chase Date: Wed, 23 Oct 2013 10:40:06 +0100 Subject: [PATCH 060/256] Create function to send frame --- pixfmt.cpp | 59 +++++++++++++++++++++++++++++++++++++++++++++++++ v4l2capture.cpp | 3 +++ v4l2out.cpp | 16 ++++++++++++++ v4l2out.h | 2 ++ 4 files changed, 80 insertions(+) diff --git a/pixfmt.cpp b/pixfmt.cpp index b5dc717..a9cbdfc 100644 --- a/pixfmt.cpp +++ b/pixfmt.cpp @@ -292,6 +292,57 @@ int ReadJpegFile(unsigned char * inbuffer, return 1; } +// ************************************************************** + +void ConvertRGBToYUYV(const unsigned char *im, unsigned sizeimage, + unsigned width, unsigned height, + unsigned char **outIm, unsigned *outImSize) +{ + unsigned bytesperline = width * 2; + unsigned padding = 4096; + //padding = 0 + *outImSize = sizeimage+padding; + unsigned char *outBuff = new unsigned char [*outImSize]; + *outIm = outBuff; + + //imgrey = im[:,:,0] * 0.299 + im[:,:,1] * 0.587 + im[:,:,2] * 0.114 + //Pb = im[:,:,0] * -0.168736 + im[:,:,1] * -0.331264 + im[:,:,2] * 0.5 + //Pr = im[:,:,0] * 0.5 + im[:,:,1] * -0.418688 + im[:,:,2] * -0.081312 + + for (unsigned y=0; y= 1) + { + PyObject *pydevarg = PyTuple_GetItem(args, 0); + devarg = PyString_AsString(pydevarg); + } + + + Py_RETURN_NONE; +} + PyObject *Video_out_manager_close(Video_out_manager *self, PyObject *args) { //Process arguments diff --git a/v4l2out.h b/v4l2out.h index 2097974..9689c55 100644 --- a/v4l2out.h +++ b/v4l2out.h @@ -20,6 +20,8 @@ void Video_out_manager_dealloc(Video_out_manager *self); PyObject 
*Video_out_manager_open(Video_out_manager *self, PyObject *args); +PyObject *Video_out_manager_Send_frame(Video_out_manager *self, PyObject *args); + PyObject *Video_out_manager_close(Video_out_manager *self, PyObject *args); // ****************************************************************** From f67fb89b1ffe637f99827be818a45ec5e46dc33f Mon Sep 17 00:00:00 2001 From: Tim Sheerman-Chase Date: Wed, 23 Oct 2013 20:35:26 +0100 Subject: [PATCH 061/256] Pass meta data into video out manager --- v4l2capture.cpp | 2 +- v4l2out.cpp | 27 +++++++++++++++++++++++---- 2 files changed, 24 insertions(+), 5 deletions(-) diff --git a/v4l2capture.cpp b/v4l2capture.cpp index 9a8fcc4..d15a316 100644 --- a/v4l2capture.cpp +++ b/v4l2capture.cpp @@ -1049,7 +1049,7 @@ static PyMethodDef Video_out_manager_methods[] = { "open(dev = '\\dev\\video0')\n\n" "Open video output."}, {"send_frame", (PyCFunction)Video_out_manager_Send_frame, METH_VARARGS, - "send_frame(dev = '\\dev\\video0', img, pixel_format)\n\n" + "send_frame(dev = '\\dev\\video0', img, pixel_format, width, height)\n\n" "Send frame to video stream output."}, {"close", (PyCFunction)Video_out_manager_close, METH_VARARGS, "close(dev = '\\dev\\video0')\n\n" diff --git a/v4l2out.cpp b/v4l2out.cpp index 50f0934..49dae85 100644 --- a/v4l2out.cpp +++ b/v4l2out.cpp @@ -162,6 +162,11 @@ class Video_out pthread_mutex_unlock(&this->lock); } + void SendFrame(const char *imgIn, unsigned imgLen, const char *pxFmt, int width, int height) + { + printf("x %i %s %i %i\n", imgLen, pxFmt, width, height); + } + void Stop() { pthread_mutex_lock(&this->lock); @@ -242,15 +247,29 @@ PyObject *Video_out_manager_open(Video_out_manager *self, PyObject *args) PyObject *Video_out_manager_Send_frame(Video_out_manager *self, PyObject *args) { printf("Video_out_manager_Send_frame\n"); + //dev = '\\dev\\video0', img, pixel_format, width, height //Process arguments - const char *devarg = "/dev/video0"; - if(PyTuple_Size(args) >= 1) + const char *devarg = NULL; + const char *imgIn = NULL; + const char *pxFmtIn = NULL; + int widthIn = 0; + int heightIn = 0; + + if(!PyArg_ParseTuple(args, "sssii", &devarg, &imgIn, &pxFmtIn, &widthIn, &heightIn)) { - PyObject *pydevarg = PyTuple_GetItem(args, 0); - devarg = PyString_AsString(pydevarg); + PyErr_Format(PyExc_RuntimeError, "Incorrect arguments to function."); + Py_RETURN_NONE; } + PyObject *pyimg = PyTuple_GetItem(args, 1); + Py_ssize_t imgLen = PyObject_Length(pyimg); + + std::map::iterator it = self->threads->find(devarg); + if(it != self->threads->end()) + { + it->second->SendFrame(imgIn, imgLen, pxFmtIn, widthIn, heightIn); + } Py_RETURN_NONE; } From 418387ec07c78d2c49210b56f61afb0de056af76 Mon Sep 17 00:00:00 2001 From: Tim Sheerman-Chase Date: Wed, 23 Oct 2013 20:59:42 +0100 Subject: [PATCH 062/256] Add sending frame to buffer --- v4l2out.cpp | 72 ++++++++++++++++++++++++++++++++++++++++++++++++++--- 1 file changed, 68 insertions(+), 4 deletions(-) diff --git a/v4l2out.cpp b/v4l2out.cpp index 49dae85..af06f01 100644 --- a/v4l2out.cpp +++ b/v4l2out.cpp @@ -5,9 +5,11 @@ #include #include +#include #include #include #include +#include #include "v4l2out.h" #define ROUND_UP_2(num) (((num)+1)&~1) @@ -27,6 +29,36 @@ void print_format(struct v4l2_format*vid_format) { //******************************************************************* +class SendFrameArgs +{ +public: + unsigned imgLen; + std::string pxFmt; + unsigned width; + unsigned height; + + SendFrameArgs() + { + imgLen = 0; + width = 0; + height = 0; + } + + SendFrameArgs(const 
SendFrameArgs &in) + { + SendFrameArgs::operator=(in); + } + + const SendFrameArgs &operator=(const SendFrameArgs &in) + { + width = in.width; + height = in.height; + imgLen = in.imgLen; + pxFmt = in.pxFmt; + return *this; + } +}; + class Video_out { public: @@ -36,6 +68,8 @@ class Video_out int stopped; pthread_mutex_t lock; int verbose; + std::vector sendFrameArgs; + std::vector sendFrameBuffer; Video_out(const char *devNameIn) { @@ -49,13 +83,21 @@ class Video_out virtual ~Video_out() { + for(unsigned i=0; isendFrameBuffer.size(); i++) + { + delete [] this->sendFrameBuffer[i]; + } + this->sendFrameBuffer.clear(); pthread_mutex_destroy(&lock); } protected: + void SendFrameInternal(class SendFrameArgs *args) + { + } public: void Run() @@ -134,6 +176,16 @@ class Video_out { usleep(1000000); + pthread_mutex_lock(&this->lock); + printf("%i\n", this->sendFrameBuffer.size()); + const char* buff = this->sendFrameBuffer[this->sendFrameBuffer.size()-1]; + class SendFrameArgs args = this->sendFrameArgs[this->sendFrameArgs.size()-1]; + + //If necessary, remove old frames (but leave one in the buffer) + //TODO + + pthread_mutex_unlock(&this->lock); + printf("Write frame\n"); write(fdwr, buffer, framesize); @@ -164,7 +216,22 @@ class Video_out void SendFrame(const char *imgIn, unsigned imgLen, const char *pxFmt, int width, int height) { - printf("x %i %s %i %i\n", imgLen, pxFmt, width, height); + pthread_mutex_lock(&this->lock); + //printf("x %i %s %i %i\n", imgLen, pxFmt, width, height); + + //Take a shallow copy of the buffer and keep for worker thread + char *buffCpy = new char[imgLen]; + memcpy(buffCpy, imgIn, imgLen); + this->sendFrameBuffer.push_back(buffCpy); + + class SendFrameArgs sendFrameArgsTmp; + sendFrameArgsTmp.imgLen = imgLen; + sendFrameArgsTmp.pxFmt = pxFmt; + sendFrameArgsTmp.width = width; + sendFrameArgsTmp.height = height; + this->sendFrameArgs.push_back(sendFrameArgsTmp); + + pthread_mutex_unlock(&this->lock); } void Stop() @@ -212,9 +279,6 @@ void Video_out_manager_dealloc(Video_out_manager *self) for(std::map::iterator it = self->threads->begin(); it != self->threads->end(); it++) { - - - it->second->Stop(); it->second->WaitForStop(); } From 144c717577b926a6c96e74124472d04a64c82bc9 Mon Sep 17 00:00:00 2001 From: Tim Sheerman-Chase Date: Wed, 23 Oct 2013 23:47:48 +0100 Subject: [PATCH 063/256] Add timer polling --- v4l2out.cpp | 84 +++++++++++++++++++++++++++++++++++++++-------------- 1 file changed, 62 insertions(+), 22 deletions(-) diff --git a/v4l2out.cpp b/v4l2out.cpp index af06f01..2204790 100644 --- a/v4l2out.cpp +++ b/v4l2out.cpp @@ -9,6 +9,8 @@ #include #include #include +#include +#include #include #include "v4l2out.h" @@ -70,15 +72,26 @@ class Video_out int verbose; std::vector sendFrameArgs; std::vector sendFrameBuffer; + struct timespec lastFrameTime; + int fdwr; + int framesize; Video_out(const char *devNameIn) { + this->fdwr = 0; + framesize = 0; stop = 0; stopped = 1; verbose = 1; this->devName = devNameIn; pthread_mutex_init(&lock, NULL); + clock_gettime(CLOCK_MONOTONIC, &lastFrameTime); + + struct sigevent sevp; + memset(&sevp, 0, sizeof(struct sigevent)); + sevp.sigev_notify = SIGEV_NONE; + } virtual ~Video_out() @@ -94,9 +107,50 @@ class Video_out protected: - void SendFrameInternal(class SendFrameArgs *args) + void SendFrameInternal() { + const char* buff = NULL; + class SendFrameArgs args; + + pthread_mutex_lock(&this->lock); + if(this->sendFrameBuffer.size()>=1) + { + //Get oldest frame + printf("%d\n", (int)this->sendFrameBuffer.size()); + buff = 
this->sendFrameBuffer[0]; + args = this->sendFrameArgs[0]; + + //Remove frame from buffer + this->sendFrameBuffer.erase(this->sendFrameBuffer.begin()); + this->sendFrameArgs.erase(this->sendFrameArgs.begin()); + } + pthread_mutex_unlock(&this->lock); + + struct timespec tp; + clock_gettime(CLOCK_MONOTONIC, &tp); + long int secSinceLastFrame = tp.tv_sec - this->lastFrameTime.tv_sec; + long int nsecSinceLastFrame = tp.tv_nsec - this->lastFrameTime.tv_nsec; + if(nsecSinceLastFrame < 0) + { + secSinceLastFrame -= 1; + nsecSinceLastFrame *= -1; + } + + if(secSinceLastFrame>=1) + { + __u8* buffer=(__u8*)malloc(sizeof(__u8)*framesize); + memset(buffer, 0, framesize); + printf("Write frame\n"); + write(this->fdwr, buffer, framesize); + + free(buffer); + this->lastFrameTime = tp; + } + + //Free image buffer + if(buff!=NULL) + delete [] buff; } public: @@ -108,11 +162,11 @@ class Video_out this->stopped = 0; pthread_mutex_unlock(&this->lock); - int fdwr = open(this->devName.c_str(), O_RDWR); + this->fdwr = open(this->devName.c_str(), O_RDWR); assert(fdwr >= 0); struct v4l2_capability vid_caps; - int ret_code = ioctl(fdwr, VIDIOC_QUERYCAP, &vid_caps); + int ret_code = ioctl(this->fdwr, VIDIOC_QUERYCAP, &vid_caps); assert(ret_code != -1); struct v4l2_format vid_format; @@ -120,7 +174,7 @@ class Video_out printf("a %d\n", vid_format.fmt.pix.sizeimage); - ret_code = ioctl(fdwr, VIDIOC_G_FMT, &vid_format); + ret_code = ioctl(this->fdwr, VIDIOC_G_FMT, &vid_format); if(verbose)print_format(&vid_format); #define FRAME_WIDTH 640 @@ -147,13 +201,13 @@ class Video_out printf("b2 %d\n", vid_format.fmt.pix.sizeimage); - ret_code = ioctl(fdwr, VIDIOC_S_FMT, &vid_format); + ret_code = ioctl(this->fdwr, VIDIOC_S_FMT, &vid_format); printf("c %d\n", vid_format.fmt.pix.sizeimage); assert(ret_code != -1); - int framesize = vid_format.fmt.pix.sizeimage; + this->framesize = vid_format.fmt.pix.sizeimage; int linewidth = vid_format.fmt.pix.bytesperline; if(verbose)printf("frame: format=%d\tsize=%d\n", FRAME_FORMAT, framesize); printf("d %d\n", vid_format.fmt.pix.sizeimage); @@ -165,29 +219,15 @@ class Video_out printf("testa %d\n", framesize); printf("f %d\n", vid_format.fmt.pix.sizeimage); - __u8* buffer=(__u8*)malloc(sizeof(__u8)*framesize); - memset(buffer, 0, framesize); - printf("testb %d\n", framesize); try { while(running) { - usleep(1000000); + usleep(1000); - pthread_mutex_lock(&this->lock); - printf("%i\n", this->sendFrameBuffer.size()); - const char* buff = this->sendFrameBuffer[this->sendFrameBuffer.size()-1]; - class SendFrameArgs args = this->sendFrameArgs[this->sendFrameArgs.size()-1]; - - //If necessary, remove old frames (but leave one in the buffer) - //TODO - - pthread_mutex_unlock(&this->lock); - - printf("Write frame\n"); - write(fdwr, buffer, framesize); + this->SendFrameInternal(); pthread_mutex_lock(&this->lock); try From a3502bd20c6c979421dd94fb645fd7331aaf5cec Mon Sep 17 00:00:00 2001 From: Tim Sheerman-Chase Date: Thu, 24 Oct 2013 00:30:43 +0100 Subject: [PATCH 064/256] Convert current frame to appropriate format --- pixfmt.cpp | 4 +++ v4l2out.cpp | 90 ++++++++++++++++++++++++++++++++++++++++++++++------- 2 files changed, 82 insertions(+), 12 deletions(-) diff --git a/pixfmt.cpp b/pixfmt.cpp index a9cbdfc..99fa40d 100644 --- a/pixfmt.cpp +++ b/pixfmt.cpp @@ -493,4 +493,8 @@ int DecodeFrame(const unsigned char *data, unsigned dataLen, return 0; } +// ********************************************************* + + + diff --git a/v4l2out.cpp b/v4l2out.cpp index 2204790..ae8edd1 100644 --- 
a/v4l2out.cpp +++ b/v4l2out.cpp @@ -13,6 +13,7 @@ #include #include #include "v4l2out.h" +#include "pixfmt.h" #define ROUND_UP_2(num) (((num)+1)&~1) #define ROUND_UP_4(num) (((num)+3)&~3) @@ -75,6 +76,13 @@ class Video_out struct timespec lastFrameTime; int fdwr; int framesize; + unsigned char *currentFrame; + + #define FRAME_WIDTH 640 + #define FRAME_HEIGHT 480 + //#define FRAME_FORMAT V4L2_PIX_FMT_YVU420 + #define FRAME_FORMAT V4L2_PIX_FMT_YUYV + #define FRAME_FORMAT_SHORT "YUYV" Video_out(const char *devNameIn) { @@ -85,6 +93,7 @@ class Video_out verbose = 1; this->devName = devNameIn; pthread_mutex_init(&lock, NULL); + currentFrame = NULL; clock_gettime(CLOCK_MONOTONIC, &lastFrameTime); @@ -102,6 +111,10 @@ class Video_out } this->sendFrameBuffer.clear(); + if(this->currentFrame!=NULL) + delete [] this->currentFrame; + this->currentFrame = NULL; + pthread_mutex_destroy(&lock); } @@ -126,6 +139,7 @@ class Video_out } pthread_mutex_unlock(&this->lock); + //Check time since previous frame send struct timespec tp; clock_gettime(CLOCK_MONOTONIC, &tp); long int secSinceLastFrame = tp.tv_sec - this->lastFrameTime.tv_sec; @@ -136,15 +150,59 @@ class Video_out nsecSinceLastFrame *= -1; } - if(secSinceLastFrame>=1) + if(buff != NULL) + { + //Convert new frame to correct size and pixel format + assert(strcmp(args.pxFmt.c_str(), "RGB24")==0); + unsigned resizeBuffLen = FRAME_WIDTH * FRAME_HEIGHT * 3; + char *buffResize = new char[resizeBuffLen]; + memset(buffResize, 0, resizeBuffLen); + for(unsigned x = 0; x < FRAME_WIDTH; x++) + { + if (x >= args.width) continue; + for(unsigned y = 0; y < FRAME_HEIGHT; y++) + { + if (y >= args.height) continue; + buffResize[y * FRAME_WIDTH * 3 + x * 3] = buff[y * args.width * 3 + x * 3]; + } + } + + unsigned char *buffOut = NULL; + unsigned buffOutLen = 0; + DecodeFrame((unsigned char *)buffResize, resizeBuffLen, + args.pxFmt.c_str(), + args.width, args.height, + FRAME_FORMAT_SHORT, + &buffOut, + &buffOutLen); + + assert(buffOutLen == this->framesize); + + //Replace current frame with new encoded frame + if(this->currentFrame!=NULL) + delete [] this->currentFrame; + this->currentFrame = buffOut; + + delete [] buffResize; + + } + + //If we have no data, initialise with a blank frame + if(this->currentFrame==NULL) { - __u8* buffer=(__u8*)malloc(sizeof(__u8)*framesize); - memset(buffer, 0, framesize); + this->currentFrame = new unsigned char[this->framesize]; + memset(this->currentFrame, 0, this->framesize); + } - printf("Write frame\n"); - write(this->fdwr, buffer, framesize); + int timeElapsed = secSinceLastFrame>=1; + + if(timeElapsed || buff != NULL) + { + //Send frame update due to time elapse + if(timeElapsed) + printf("Write frame due to elapse time\n"); + write(this->fdwr, this->currentFrame, this->framesize); - free(buffer); this->lastFrameTime = tp; } @@ -177,12 +235,20 @@ class Video_out ret_code = ioctl(this->fdwr, VIDIOC_G_FMT, &vid_format); if(verbose)print_format(&vid_format); - #define FRAME_WIDTH 640 - #define FRAME_HEIGHT 480 - #define FRAME_FORMAT V4L2_PIX_FMT_YVU420 - int lw = FRAME_WIDTH; /* ??? */ - int fw = ROUND_UP_4 (FRAME_WIDTH) * ROUND_UP_2 (FRAME_HEIGHT); - fw += 2 * ((ROUND_UP_8 (FRAME_WIDTH) / 2) * (ROUND_UP_2 (FRAME_HEIGHT) / 2)); + int lw = 0; + int fw = 0; + if(FRAME_FORMAT==V4L2_PIX_FMT_YVU420) + { + lw = FRAME_WIDTH; /* ??? 
*/ + fw = ROUND_UP_4 (FRAME_WIDTH) * ROUND_UP_2 (FRAME_HEIGHT); + fw += 2 * ((ROUND_UP_8 (FRAME_WIDTH) / 2) * (ROUND_UP_2 (FRAME_HEIGHT) / 2)); + } + + if(FRAME_FORMAT==V4L2_PIX_FMT_YUYV) + { + lw = (ROUND_UP_2 (FRAME_WIDTH) * 2); + fw = lw * FRAME_HEIGHT; + } vid_format.type = V4L2_BUF_TYPE_VIDEO_OUTPUT; vid_format.fmt.pix.width = FRAME_WIDTH; From 628d29c3c6834125fb7158f1dfa4ed000759a36b Mon Sep 17 00:00:00 2001 From: Tim Sheerman-Chase Date: Thu, 24 Oct 2013 00:32:53 +0100 Subject: [PATCH 065/256] Convert current frame to appropriate format --- pixfmt.cpp | 3 +-- v4l2out.cpp | 13 +++++++++++++ 2 files changed, 14 insertions(+), 2 deletions(-) diff --git a/pixfmt.cpp b/pixfmt.cpp index 99fa40d..1ff5f3b 100644 --- a/pixfmt.cpp +++ b/pixfmt.cpp @@ -299,8 +299,7 @@ void ConvertRGBToYUYV(const unsigned char *im, unsigned sizeimage, unsigned char **outIm, unsigned *outImSize) { unsigned bytesperline = width * 2; - unsigned padding = 4096; - //padding = 0 + unsigned padding = 0; *outImSize = sizeimage+padding; unsigned char *outBuff = new unsigned char [*outImSize]; *outIm = outBuff; diff --git a/v4l2out.cpp b/v4l2out.cpp index ae8edd1..3835dce 100644 --- a/v4l2out.cpp +++ b/v4l2out.cpp @@ -77,6 +77,7 @@ class Video_out int fdwr; int framesize; unsigned char *currentFrame; + unsigned char *paddingBuff; #define FRAME_WIDTH 640 #define FRAME_HEIGHT 480 @@ -94,6 +95,7 @@ class Video_out this->devName = devNameIn; pthread_mutex_init(&lock, NULL); currentFrame = NULL; + paddingBuff = NULL; clock_gettime(CLOCK_MONOTONIC, &lastFrameTime); @@ -115,6 +117,10 @@ class Video_out delete [] this->currentFrame; this->currentFrame = NULL; + if(this->paddingBuff!=NULL) + delete [] this->paddingBuff; + this->paddingBuff = NULL; + pthread_mutex_destroy(&lock); } @@ -194,6 +200,12 @@ class Video_out memset(this->currentFrame, 0, this->framesize); } + if(this->paddingBuff==NULL) + { + this->paddingBuff = new unsigned char[4096]; + memset(this->paddingBuff, 0, 4096); + } + int timeElapsed = secSinceLastFrame>=1; if(timeElapsed || buff != NULL) @@ -201,6 +213,7 @@ class Video_out //Send frame update due to time elapse if(timeElapsed) printf("Write frame due to elapse time\n"); + write(this->fdwr, this->paddingBuff, 4096); write(this->fdwr, this->currentFrame, this->framesize); this->lastFrameTime = tp; From 2ade3e6bef35aad2ed59699264d4d2486c7063a4 Mon Sep 17 00:00:00 2001 From: Tim Sheerman-Chase Date: Thu, 24 Oct 2013 00:36:34 +0100 Subject: [PATCH 066/256] Set colour channels to grey --- pixfmt.cpp | 22 +++++++++------------- 1 file changed, 9 insertions(+), 13 deletions(-) diff --git a/pixfmt.cpp b/pixfmt.cpp index 1ff5f3b..21a8e81 100644 --- a/pixfmt.cpp +++ b/pixfmt.cpp @@ -320,26 +320,22 @@ void ConvertRGBToYUYV(const unsigned char *im, unsigned sizeimage, } //Set color information for Cb - /*cursor = y * bytesperline + padding + cursor = y * bytesperline + padding; for(unsigned x=0;x< width;x++) { - try: - buff[cursor+1] = 0.5 * (Pb[y, x] + Pb[y, x+1]) + 128 - except IndexError: - pass - cursor += 4 + //buff[cursor+1] = 0.5 * (Pb[y, x] + Pb[y, x+1]) + 128 + outBuff[cursor+1] = 128; + cursor += 4; } //Set color information for Cr - cursor = y * bytesperline + padding + cursor = y * bytesperline + padding; for(unsigned x=0;x< width;x++) { - try: - buff[cursor+3] = 0.5 * (Pr[y, x] + Pr[y, x+1]) + 128 - except IndexError: - pass - cursor += 4 - }*/ + //buff[cursor+3] = 0.5 * (Pr[y, x] + Pr[y, x+1]) + 128 + outBuff[cursor+3] = 128; + cursor += 4; + } } } // 
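
For reference, the buffer that ConvertRGBToYUYV builds above packs two horizontal pixels into four bytes (Y0, Cb, Y1, Cr), so bytesperline is width * 2, and with every chroma byte forced to 128 as in this patch the output is effectively greyscale. The following is a rough Python sketch of that packing, not part of the library, assuming an RGB24 input of width * height * 3 bytes:

    # Sketch of the YUYV layout produced above: Y from the BT.601 luma weights,
    # every chroma byte fixed at the neutral value 128 (greyscale output).
    def rgb24_to_grey_yuyv(rgb, width, height):
        rgb = bytearray(rgb)
        out = bytearray(width * height * 2)        # YUYV: two bytes per pixel
        for y in range(height):
            for x in range(width):
                i = (y * width + x) * 3
                luma = int(0.299 * rgb[i] + 0.587 * rgb[i + 1] + 0.114 * rgb[i + 2])
                cursor = (y * width + x) * 2       # bytesperline = width * 2
                out[cursor] = min(luma, 255)       # Y sample for this pixel
                out[cursor + 1] = 128              # Cb and Cr alternate; 128 = no colour
        return bytes(out)

The "Fix yuyv colour transform" patch below replaces the fixed 128s with computed chroma values.
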
********************************************************************* From 9e474d6c728cff054c673b408d12d688d0982c8c Mon Sep 17 00:00:00 2001 From: Tim Sheerman-Chase Date: Thu, 24 Oct 2013 01:43:28 +0100 Subject: [PATCH 067/256] Fix yuyv colour transform --- pixfmt.cpp | 23 +++++++++++++---------- v4l2out.cpp | 4 +++- 2 files changed, 16 insertions(+), 11 deletions(-) diff --git a/pixfmt.cpp b/pixfmt.cpp index 21a8e81..517c639 100644 --- a/pixfmt.cpp +++ b/pixfmt.cpp @@ -303,10 +303,7 @@ void ConvertRGBToYUYV(const unsigned char *im, unsigned sizeimage, *outImSize = sizeimage+padding; unsigned char *outBuff = new unsigned char [*outImSize]; *outIm = outBuff; - - //imgrey = im[:,:,0] * 0.299 + im[:,:,1] * 0.587 + im[:,:,2] * 0.114 - //Pb = im[:,:,0] * -0.168736 + im[:,:,1] * -0.331264 + im[:,:,2] * 0.5 - //Pr = im[:,:,0] * 0.5 + im[:,:,1] * -0.418688 + im[:,:,2] * -0.081312 + unsigned char *im2 = (unsigned char *)im; for (unsigned y=0; y= args.height) continue; buffResize[y * FRAME_WIDTH * 3 + x * 3] = buff[y * args.width * 3 + x * 3]; + buffResize[y * FRAME_WIDTH * 3 + x * 3 + 1] = buff[y * args.width * 3 + x * 3 + 1]; + buffResize[y * FRAME_WIDTH * 3 + x * 3 + 2] = buff[y * args.width * 3 + x * 3 + 2]; } } @@ -177,7 +179,7 @@ class Video_out unsigned buffOutLen = 0; DecodeFrame((unsigned char *)buffResize, resizeBuffLen, args.pxFmt.c_str(), - args.width, args.height, + FRAME_WIDTH, FRAME_HEIGHT, FRAME_FORMAT_SHORT, &buffOut, &buffOutLen); From b7f612ebd4cbb9dbbd396473101ab26df86cdf5d Mon Sep 17 00:00:00 2001 From: Tim Sheerman-Chase Date: Thu, 24 Oct 2013 01:59:57 +0100 Subject: [PATCH 068/256] Allow configuration of pix format and output size --- v4l2capture.cpp | 2 +- v4l2out.cpp | 73 +++++++++++++++++++++++++++++-------------------- 2 files changed, 45 insertions(+), 30 deletions(-) diff --git a/v4l2capture.cpp b/v4l2capture.cpp index d15a316..75a764f 100644 --- a/v4l2capture.cpp +++ b/v4l2capture.cpp @@ -1046,7 +1046,7 @@ static PyTypeObject Device_manager_type = { static PyMethodDef Video_out_manager_methods[] = { {"open", (PyCFunction)Video_out_manager_open, METH_VARARGS, - "open(dev = '\\dev\\video0')\n\n" + "open(dev = '\\dev\\video0', pixel_format, width, height)\n\n" "Open video output."}, {"send_frame", (PyCFunction)Video_out_manager_Send_frame, METH_VARARGS, "send_frame(dev = '\\dev\\video0', img, pixel_format, width, height)\n\n" diff --git a/v4l2out.cpp b/v4l2out.cpp index f556b72..81d5106 100644 --- a/v4l2out.cpp +++ b/v4l2out.cpp @@ -78,12 +78,9 @@ class Video_out int framesize; unsigned char *currentFrame; unsigned char *paddingBuff; - - #define FRAME_WIDTH 640 - #define FRAME_HEIGHT 480 - //#define FRAME_FORMAT V4L2_PIX_FMT_YVU420 - #define FRAME_FORMAT V4L2_PIX_FMT_YUYV - #define FRAME_FORMAT_SHORT "YUYV" + int outputWidth; + int outputHeight; + std::string outputPxFmt; Video_out(const char *devNameIn) { @@ -96,6 +93,9 @@ class Video_out pthread_mutex_init(&lock, NULL); currentFrame = NULL; paddingBuff = NULL; + outputWidth = 640; + outputHeight = 480; + outputPxFmt = "YUYV"; clock_gettime(CLOCK_MONOTONIC, &lastFrameTime); @@ -160,18 +160,18 @@ class Video_out { //Convert new frame to correct size and pixel format assert(strcmp(args.pxFmt.c_str(), "RGB24")==0); - unsigned resizeBuffLen = FRAME_WIDTH * FRAME_HEIGHT * 3; + unsigned resizeBuffLen = this->outputWidth * this->outputHeight * 3; char *buffResize = new char[resizeBuffLen]; memset(buffResize, 0, resizeBuffLen); - for(unsigned x = 0; x < FRAME_WIDTH; x++) + for(unsigned x = 0; x < this->outputWidth; x++) { if (x 
>= args.width) continue; - for(unsigned y = 0; y < FRAME_HEIGHT; y++) + for(unsigned y = 0; y < this->outputHeight; y++) { if (y >= args.height) continue; - buffResize[y * FRAME_WIDTH * 3 + x * 3] = buff[y * args.width * 3 + x * 3]; - buffResize[y * FRAME_WIDTH * 3 + x * 3 + 1] = buff[y * args.width * 3 + x * 3 + 1]; - buffResize[y * FRAME_WIDTH * 3 + x * 3 + 2] = buff[y * args.width * 3 + x * 3 + 2]; + buffResize[y * this->outputWidth * 3 + x * 3] = buff[y * args.width * 3 + x * 3]; + buffResize[y * this->outputWidth * 3 + x * 3 + 1] = buff[y * args.width * 3 + x * 3 + 1]; + buffResize[y * this->outputWidth * 3 + x * 3 + 2] = buff[y * args.width * 3 + x * 3 + 2]; } } @@ -179,8 +179,8 @@ class Video_out unsigned buffOutLen = 0; DecodeFrame((unsigned char *)buffResize, resizeBuffLen, args.pxFmt.c_str(), - FRAME_WIDTH, FRAME_HEIGHT, - FRAME_FORMAT_SHORT, + this->outputWidth, this->outputHeight, + this->outputPxFmt.c_str(), &buffOut, &buffOutLen); @@ -252,23 +252,30 @@ class Video_out int lw = 0; int fw = 0; - if(FRAME_FORMAT==V4L2_PIX_FMT_YVU420) + if(strcmp(this->outputPxFmt.c_str(), "YVU420")==0) { - lw = FRAME_WIDTH; /* ??? */ - fw = ROUND_UP_4 (FRAME_WIDTH) * ROUND_UP_2 (FRAME_HEIGHT); - fw += 2 * ((ROUND_UP_8 (FRAME_WIDTH) / 2) * (ROUND_UP_2 (FRAME_HEIGHT) / 2)); + lw = this->outputWidth; /* ??? */ + fw = ROUND_UP_4 (this->outputWidth) * ROUND_UP_2 (this->outputHeight); + fw += 2 * ((ROUND_UP_8 (this->outputWidth) / 2) * (ROUND_UP_2 (this->outputHeight) / 2)); } - if(FRAME_FORMAT==V4L2_PIX_FMT_YUYV) + if(strcmp(this->outputPxFmt.c_str(), "YUYV")==0) { - lw = (ROUND_UP_2 (FRAME_WIDTH) * 2); - fw = lw * FRAME_HEIGHT; + lw = (ROUND_UP_2 (this->outputWidth) * 2); + fw = lw * this->outputHeight; } vid_format.type = V4L2_BUF_TYPE_VIDEO_OUTPUT; - vid_format.fmt.pix.width = FRAME_WIDTH; - vid_format.fmt.pix.height = FRAME_HEIGHT; - vid_format.fmt.pix.pixelformat = FRAME_FORMAT; + vid_format.fmt.pix.width = this->outputWidth; + vid_format.fmt.pix.height = this->outputHeight; + vid_format.fmt.pix.pixelformat = 0; + if(strcmp(this->outputPxFmt.c_str(), "YUYV")==0) + vid_format.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV; + if(strcmp(this->outputPxFmt.c_str(), "YVU420")==0) + vid_format.fmt.pix.pixelformat = V4L2_PIX_FMT_YVU420; + if(strcmp(this->outputPxFmt.c_str(), "RGB24")==0) + vid_format.fmt.pix.pixelformat = V4L2_PIX_FMT_RGB24; + vid_format.fmt.pix.sizeimage = lw; //printf("test %d\n", vid_format.fmt.pix.sizeimage); vid_format.fmt.pix.field = V4L2_FIELD_NONE; @@ -290,7 +297,7 @@ class Video_out this->framesize = vid_format.fmt.pix.sizeimage; int linewidth = vid_format.fmt.pix.bytesperline; - if(verbose)printf("frame: format=%d\tsize=%d\n", FRAME_FORMAT, framesize); + if(verbose)printf("frame: format=%s\tsize=%d\n", this->outputPxFmt.c_str(), framesize); printf("d %d\n", vid_format.fmt.pix.sizeimage); print_format(&vid_format); @@ -412,11 +419,15 @@ void Video_out_manager_dealloc(Video_out_manager *self) PyObject *Video_out_manager_open(Video_out_manager *self, PyObject *args) { //Process arguments - const char *devarg = "/dev/video0"; - if(PyTuple_Size(args) >= 1) + const char *devarg = NULL; + const char *pxFmtIn = NULL; + int widthIn = 0; + int heightIn = 0; + + if(!PyArg_ParseTuple(args, "ssii", &devarg, &pxFmtIn, &widthIn, &heightIn)) { - PyObject *pydevarg = PyTuple_GetItem(args, 0); - devarg = PyString_AsString(pydevarg); + PyErr_Format(PyExc_RuntimeError, "Incorrect arguments to function."); + Py_RETURN_NONE; } //Create worker thread @@ -424,6 +435,10 @@ PyObject 
*Video_out_manager_open(Video_out_manager *self, PyObject *args) Video_out *threadArgs = new Video_out(devarg); (*self->threads)[devarg] = threadArgs; threadArgs->self = self; + threadArgs->outputWidth = widthIn; + threadArgs->outputHeight = heightIn; + threadArgs->outputPxFmt = pxFmtIn; + pthread_create(&thread, NULL, Video_out_manager_Worker_thread, threadArgs); Py_RETURN_NONE; From 4d0665fa7e558aeae2a43e1d180cc61a70a8f6d0 Mon Sep 17 00:00:00 2001 From: Tim Sheerman-Chase Date: Thu, 24 Oct 2013 02:02:50 +0100 Subject: [PATCH 069/256] Reduce debug messages --- v4l2capture.cpp | 2 +- v4l2out.cpp | 5 ++--- 2 files changed, 3 insertions(+), 4 deletions(-) diff --git a/v4l2capture.cpp b/v4l2capture.cpp index 75a764f..00508d1 100644 --- a/v4l2capture.cpp +++ b/v4l2capture.cpp @@ -282,7 +282,7 @@ class Device_manager_Worker_thread_args frameWidth = 0; frameHeight = 0; decodedFrameBuffMaxSize = 10; - verbose = 1; + verbose = 0; targetFmt = "RGB24"; } diff --git a/v4l2out.cpp b/v4l2out.cpp index 81d5106..740815f 100644 --- a/v4l2out.cpp +++ b/v4l2out.cpp @@ -88,7 +88,7 @@ class Video_out framesize = 0; stop = 0; stopped = 1; - verbose = 1; + verbose = 0; this->devName = devNameIn; pthread_mutex_init(&lock, NULL); currentFrame = NULL; @@ -135,7 +135,6 @@ class Video_out if(this->sendFrameBuffer.size()>=1) { //Get oldest frame - printf("%d\n", (int)this->sendFrameBuffer.size()); buff = this->sendFrameBuffer[0]; args = this->sendFrameArgs[0]; @@ -446,7 +445,7 @@ PyObject *Video_out_manager_open(Video_out_manager *self, PyObject *args) PyObject *Video_out_manager_Send_frame(Video_out_manager *self, PyObject *args) { - printf("Video_out_manager_Send_frame\n"); + //printf("Video_out_manager_Send_frame\n"); //dev = '\\dev\\video0', img, pixel_format, width, height //Process arguments From 3158a1c056d0895117685d8bee94e9fec49feccf Mon Sep 17 00:00:00 2001 From: Tim Sheerman-Chase Date: Thu, 24 Oct 2013 02:06:25 +0100 Subject: [PATCH 070/256] Remove unnecessary padding --- v4l2out.cpp | 31 ------------------------------- 1 file changed, 31 deletions(-) diff --git a/v4l2out.cpp b/v4l2out.cpp index 740815f..cf74da8 100644 --- a/v4l2out.cpp +++ b/v4l2out.cpp @@ -77,7 +77,6 @@ class Video_out int fdwr; int framesize; unsigned char *currentFrame; - unsigned char *paddingBuff; int outputWidth; int outputHeight; std::string outputPxFmt; @@ -92,7 +91,6 @@ class Video_out this->devName = devNameIn; pthread_mutex_init(&lock, NULL); currentFrame = NULL; - paddingBuff = NULL; outputWidth = 640; outputHeight = 480; outputPxFmt = "YUYV"; @@ -117,10 +115,6 @@ class Video_out delete [] this->currentFrame; this->currentFrame = NULL; - if(this->paddingBuff!=NULL) - delete [] this->paddingBuff; - this->paddingBuff = NULL; - pthread_mutex_destroy(&lock); } @@ -201,12 +195,6 @@ class Video_out memset(this->currentFrame, 0, this->framesize); } - if(this->paddingBuff==NULL) - { - this->paddingBuff = new unsigned char[4096]; - memset(this->paddingBuff, 0, 4096); - } - int timeElapsed = secSinceLastFrame>=1; if(timeElapsed || buff != NULL) @@ -214,7 +202,6 @@ class Video_out //Send frame update due to time elapse if(timeElapsed) printf("Write frame due to elapse time\n"); - write(this->fdwr, this->paddingBuff, 4096); write(this->fdwr, this->currentFrame, this->framesize); this->lastFrameTime = tp; @@ -244,8 +231,6 @@ class Video_out struct v4l2_format vid_format; memset(&vid_format, 0, sizeof(vid_format)); - printf("a %d\n", vid_format.fmt.pix.sizeimage); - ret_code = ioctl(this->fdwr, VIDIOC_G_FMT, &vid_format); 
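
With the "Allow configuration of pix format and output size" change above, the output side is now configured entirely from Python: open() takes the device, pixel format and frame size, and send_frame() supplies RGB24 data together with its own dimensions, which the worker thread crops or pads and converts as shown earlier. A minimal usage sketch follows; the module name matches the rename that happens later in this series (videolive), the device path is only an example (e.g. a loopback output node), and the manager object is assumed to take no constructor arguments:

    import videolive

    out = videolive.Video_out_manager()
    out.open("/dev/video1", "YUYV", 640, 480)   # open(dev, pixel_format, width, height)

    w, h = 320, 240
    frame = b"\x80" * (w * h * 3)               # raw RGB24 bytes; may differ from the output size
    out.send_frame("/dev/video1", frame, "RGB24", w, h)

    out.close("/dev/video1")
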
if(verbose)print_format(&vid_format); @@ -282,31 +267,15 @@ class Video_out //printf("test2 %d\n", vid_format.fmt.pix.bytesperline); vid_format.fmt.pix.colorspace = V4L2_COLORSPACE_SRGB; - printf("b %d\n", vid_format.fmt.pix.sizeimage); - if(verbose)print_format(&vid_format); - printf("b2 %d\n", vid_format.fmt.pix.sizeimage); - ret_code = ioctl(this->fdwr, VIDIOC_S_FMT, &vid_format); - printf("c %d\n", vid_format.fmt.pix.sizeimage); - assert(ret_code != -1); this->framesize = vid_format.fmt.pix.sizeimage; int linewidth = vid_format.fmt.pix.bytesperline; if(verbose)printf("frame: format=%s\tsize=%d\n", this->outputPxFmt.c_str(), framesize); - printf("d %d\n", vid_format.fmt.pix.sizeimage); - print_format(&vid_format); - - printf("test %d\n", framesize); - printf("e %d\n", vid_format.fmt.pix.sizeimage); - - printf("testa %d\n", framesize); - printf("f %d\n", vid_format.fmt.pix.sizeimage); - - printf("testb %d\n", framesize); try { From 4d2f69e746eecbec1dd05824fd690dc557e2cacd Mon Sep 17 00:00:00 2001 From: Tim Sheerman-Chase Date: Thu, 24 Oct 2013 02:51:15 +0100 Subject: [PATCH 071/256] Add libjpeg error handling --- pixfmt.cpp | 27 ++++++++++++++++++++++++++- v4l2out.cpp | 30 ++++++++++++++++++++++++------ 2 files changed, 50 insertions(+), 7 deletions(-) diff --git a/pixfmt.cpp b/pixfmt.cpp index 517c639..3f85268 100644 --- a/pixfmt.cpp +++ b/pixfmt.cpp @@ -207,6 +207,18 @@ struct my_error_mgr jmp_buf setjmp_buffer; /* for return to caller */ }; +typedef struct my_error_mgr * my_error_ptr; +METHODDEF(void) my_error_exit (j_common_ptr cinfo) +{ + my_error_ptr myerr = (my_error_ptr) cinfo->err; + + /* Always display the message. */ + (*cinfo->err->output_message) (cinfo); + + /* Return control to the setjmp point */ + longjmp(myerr->setjmp_buffer, 1); +} + int ReadJpegFile(unsigned char * inbuffer, unsigned long insize, unsigned char **outBuffer, @@ -230,6 +242,16 @@ int ReadJpegFile(unsigned char * inbuffer, /* Step 1: initialize the JPEG decompression object. */ cinfo.err = jpeg_std_error(&jerr.pub); + jerr.pub.error_exit = my_error_exit; + /* Establish the setjmp return context for my_error_exit to use. */ + if (setjmp(jerr.setjmp_buffer)) { + /* If we get here, the JPEG code has signaled an error. + * We need to clean up the JPEG object, close the input file, and return. 
+ */ + jpeg_destroy_decompress(&cinfo); + return 0; + } + jpeg_create_decompress(&cinfo); /* Step 2: specify data source */ @@ -372,11 +394,14 @@ int DecodeFrame(const unsigned char *data, unsigned dataLen, unsigned decodedBuffSize = 0; int widthActual = 0, heightActual = 0, channelsActual = 0; - ReadJpegFile((unsigned char*)jpegBin.c_str(), jpegBin.length(), + int jpegOk = ReadJpegFile((unsigned char*)jpegBin.c_str(), jpegBin.length(), &decodedBuff, &decodedBuffSize, &widthActual, &heightActual, &channelsActual); + if (!jpegOk) + throw std::runtime_error("Error decoding jpeg"); + if(widthActual == width && heightActual == height) { assert(channelsActual == 3); diff --git a/v4l2out.cpp b/v4l2out.cpp index cf74da8..35fe479 100644 --- a/v4l2out.cpp +++ b/v4l2out.cpp @@ -12,6 +12,7 @@ #include #include #include +#include #include "v4l2out.h" #include "pixfmt.h" @@ -87,7 +88,7 @@ class Video_out framesize = 0; stop = 0; stopped = 1; - verbose = 0; + verbose = 1; this->devName = devNameIn; pthread_mutex_init(&lock, NULL); currentFrame = NULL; @@ -261,10 +262,8 @@ class Video_out vid_format.fmt.pix.pixelformat = V4L2_PIX_FMT_RGB24; vid_format.fmt.pix.sizeimage = lw; - //printf("test %d\n", vid_format.fmt.pix.sizeimage); vid_format.fmt.pix.field = V4L2_FIELD_NONE; vid_format.fmt.pix.bytesperline = fw; - //printf("test2 %d\n", vid_format.fmt.pix.bytesperline); vid_format.fmt.pix.colorspace = V4L2_COLORSPACE_SRGB; if(verbose)print_format(&vid_format); @@ -313,7 +312,7 @@ class Video_out void SendFrame(const char *imgIn, unsigned imgLen, const char *pxFmt, int width, int height) { pthread_mutex_lock(&this->lock); - //printf("x %i %s %i %i\n", imgLen, pxFmt, width, height); + if(verbose) printf("SendFrame %i %s %i %i\n", imgLen, pxFmt, width, height); //Take a shallow copy of the buffer and keep for worker thread char *buffCpy = new char[imgLen]; @@ -424,20 +423,39 @@ PyObject *Video_out_manager_Send_frame(Video_out_manager *self, PyObject *args) int widthIn = 0; int heightIn = 0; - if(!PyArg_ParseTuple(args, "sssii", &devarg, &imgIn, &pxFmtIn, &widthIn, &heightIn)) + if(PyObject_Length(args) < 5) { - PyErr_Format(PyExc_RuntimeError, "Incorrect arguments to function."); + PyErr_Format(PyExc_RuntimeError, "Too few arguments."); Py_RETURN_NONE; } + + PyObject *pydev = PyTuple_GetItem(args, 0); + devarg = PyString_AsString(pydev); + PyObject *pyimg = PyTuple_GetItem(args, 1); + imgIn = PyString_AsString(pyimg); Py_ssize_t imgLen = PyObject_Length(pyimg); + PyObject *pyPxFmt = PyTuple_GetItem(args, 2); + pxFmtIn = PyString_AsString(pyPxFmt); + + PyObject *pyWidth = PyTuple_GetItem(args, 3); + widthIn = PyInt_AsLong(pyWidth); + + PyObject *pyHeight = PyTuple_GetItem(args, 4); + heightIn = PyInt_AsLong(pyHeight); + std::map::iterator it = self->threads->find(devarg); if(it != self->threads->end()) { it->second->SendFrame(imgIn, imgLen, pxFmtIn, widthIn, heightIn); } + else + { + PyErr_Format(PyExc_RuntimeError, "Device not found."); + Py_RETURN_NONE; + } Py_RETURN_NONE; } From a5ae422c1b9d51ecb1227ab5d5fffa68667d2f26 Mon Sep 17 00:00:00 2001 From: Tim Sheerman-Chase Date: Sat, 26 Oct 2013 17:19:48 +0100 Subject: [PATCH 072/256] Added UYVY for skype --- pixfmt.cpp | 53 +++++++++++++++++++++++++++++++++++++++++++---------- v4l2out.cpp | 5 ++++- 2 files changed, 47 insertions(+), 11 deletions(-) diff --git a/pixfmt.cpp b/pixfmt.cpp index 3f85268..72b0a2d 100644 --- a/pixfmt.cpp +++ b/pixfmt.cpp @@ -316,8 +316,8 @@ int ReadJpegFile(unsigned char * inbuffer, // 
************************************************************** -void ConvertRGBToYUYV(const unsigned char *im, unsigned sizeimage, - unsigned width, unsigned height, +void ConvertRGBtoYUYVorSimilar(const unsigned char *im, unsigned sizeimage, + unsigned width, unsigned height, const char *targetPxFmt, unsigned char **outIm, unsigned *outImSize) { unsigned bytesperline = width * 2; @@ -327,15 +327,45 @@ void ConvertRGBToYUYV(const unsigned char *im, unsigned sizeimage, *outIm = outBuff; unsigned char *im2 = (unsigned char *)im; + int uOffset = 0; + int vOffset = 0; + int yOffset1 = 0; + int yOffset2 = 0; + int formatKnown = 0; + + if(strcmp(targetPxFmt, "YUYV")==0) + { + uOffset = 1; + vOffset = 3; + yOffset1 = 0; + yOffset2 = 2; + formatKnown = 1; + } + + if(strcmp(targetPxFmt, "UYVY")==0) + { + uOffset = 0; + vOffset = 2; + yOffset1 = 1; + yOffset2 = 3; + formatKnown = 1; + } + + if(!formatKnown) + { + throw std::runtime_error("Unknown target pixel format"); + } + for (unsigned y=0; youtputWidth) / 2) * (ROUND_UP_2 (this->outputHeight) / 2)); } - if(strcmp(this->outputPxFmt.c_str(), "YUYV")==0) + if(strcmp(this->outputPxFmt.c_str(), "YUYV")==0 + || strcmp(this->outputPxFmt.c_str(), "UYVY")==0 ) { lw = (ROUND_UP_2 (this->outputWidth) * 2); fw = lw * this->outputHeight; @@ -256,6 +257,8 @@ class Video_out vid_format.fmt.pix.pixelformat = 0; if(strcmp(this->outputPxFmt.c_str(), "YUYV")==0) vid_format.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV; + if(strcmp(this->outputPxFmt.c_str(), "UYVY")==0) + vid_format.fmt.pix.pixelformat = V4L2_PIX_FMT_UYVY; if(strcmp(this->outputPxFmt.c_str(), "YVU420")==0) vid_format.fmt.pix.pixelformat = V4L2_PIX_FMT_YVU420; if(strcmp(this->outputPxFmt.c_str(), "RGB24")==0) From 0d5887ade1265b99070795ba44e3d4cdbc4820e3 Mon Sep 17 00:00:00 2001 From: Tim Sheerman-Chase Date: Sun, 27 Oct 2013 23:10:23 +0000 Subject: [PATCH 073/256] Fix null pointer deference --- v4l2capture.cpp | 40 ++++++++++++++++++++++++++++++++++++++++ 1 file changed, 40 insertions(+) diff --git a/v4l2capture.cpp b/v4l2capture.cpp index 00508d1..ad98882 100644 --- a/v4l2capture.cpp +++ b/v4l2capture.cpp @@ -873,6 +873,14 @@ static PyObject *Device_manager_set_format(Device_manager *self, PyObject *args) Py_RETURN_NONE; } + //Check this device is valid + std::map::iterator it = self->threadArgStore->find(devarg); + if(it==self->threadArgStore->end()) + { + PyErr_Format(PyExc_RuntimeError, "Device already not ready."); + Py_RETURN_NONE; + } + class Device_manager_Worker_thread_args *threadArgs = (*self->threadArgStore)[devarg]; threadArgs->SetFormat(fmt, size_x, size_y); @@ -897,6 +905,14 @@ static PyObject *Device_manager_Start(Device_manager *self, PyObject *args) buffer_count = PyInt_AsLong(pybufferarg); } + //Check this device is valid + std::map::iterator it = self->threadArgStore->find(devarg); + if(it==self->threadArgStore->end()) + { + PyErr_Format(PyExc_RuntimeError, "Device already not ready."); + Py_RETURN_NONE; + } + class Device_manager_Worker_thread_args *threadArgs = (*self->threadArgStore)[devarg]; threadArgs->StartDevice(buffer_count); @@ -914,6 +930,14 @@ static PyObject *Device_manager_Get_frame(Device_manager *self, PyObject *args) devarg = PyString_AsString(pydevarg); } + //Check this device is valid + std::map::iterator it = self->threadArgStore->find(devarg); + if(it==self->threadArgStore->end()) + { + PyErr_Format(PyExc_RuntimeError, "Device already not ready."); + Py_RETURN_NONE; + } + class Device_manager_Worker_thread_args *threadArgs = (*self->threadArgStore)[devarg]; 
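
These guards close the null dereference: any call naming a device that was never opened (or has already been closed) now sets a RuntimeError ("Device already not ready.") instead of indexing a missing map entry. The intended calling pattern from Python, sketched with an illustrative device path and the module name used after the later rename:

    import videolive

    cap = videolive.Device_manager()
    try:
        cap.start("/dev/video7")                # never opened, so the guard trips
    except RuntimeError as err:
        print("capture device not ready: %s" % err)
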
unsigned char *buffOut = NULL; class FrameMetaData metaOut; @@ -951,6 +975,14 @@ static PyObject *Device_manager_stop(Device_manager *self, PyObject *args) devarg = PyString_AsString(pydevarg); } + //Check this device is valid + std::map::iterator it = self->threadArgStore->find(devarg); + if(it==self->threadArgStore->end()) + { + PyErr_Format(PyExc_RuntimeError, "Device already not ready."); + Py_RETURN_NONE; + } + class Device_manager_Worker_thread_args *threadArgs = (*self->threadArgStore)[devarg]; threadArgs->StopDevice(); @@ -967,6 +999,14 @@ static PyObject *Device_manager_close(Device_manager *self, PyObject *args) devarg = PyString_AsString(pydevarg); } + //Check this device is valid + std::map::iterator it = self->threadArgStore->find(devarg); + if(it==self->threadArgStore->end()) + { + PyErr_Format(PyExc_RuntimeError, "Device already not ready."); + Py_RETURN_NONE; + } + class Device_manager_Worker_thread_args *threadArgs = (*self->threadArgStore)[devarg]; threadArgs->CloseDevice(); From b5de1810d44631e07e5bb0e4a7b3cc38ebba8a08 Mon Sep 17 00:00:00 2001 From: Tim Sheerman-Chase Date: Tue, 5 Nov 2013 16:42:33 +0000 Subject: [PATCH 074/256] List video devices --- v4l2capture.cpp | 3 +++ v4l2out.cpp | 23 +++++++++++++++++++++++ v4l2out.h | 2 ++ 3 files changed, 28 insertions(+) diff --git a/v4l2capture.cpp b/v4l2capture.cpp index ad98882..3f9da57 100644 --- a/v4l2capture.cpp +++ b/v4l2capture.cpp @@ -1094,6 +1094,9 @@ static PyMethodDef Video_out_manager_methods[] = { {"close", (PyCFunction)Video_out_manager_close, METH_VARARGS, "close(dev = '\\dev\\video0')\n\n" "Close video device. Subsequent calls to other methods will fail."}, + {"list_devices", (PyCFunction)Video_out_manager_list_devices, METH_NOARGS, + "list_devices()\n\n" + "List available capture devices."}, {NULL} }; diff --git a/v4l2out.cpp b/v4l2out.cpp index 5082488..4ea18cf 100644 --- a/v4l2out.cpp +++ b/v4l2out.cpp @@ -11,6 +11,7 @@ #include #include #include +#include #include #include #include "v4l2out.h" @@ -484,3 +485,25 @@ PyObject *Video_out_manager_close(Video_out_manager *self, PyObject *args) Py_RETURN_NONE; } +PyObject *Video_out_manager_list_devices(Video_out_manager *self) +{ + PyObject *out = PyList_New(0); + const char dir[] = "/dev"; + DIR *dp; + struct dirent *dirp; + if((dp = opendir(dir)) == NULL) { + printf("Error(%d) opening %s\n", errno, dir); + Py_RETURN_NONE; + } + + while ((dirp = readdir(dp)) != NULL) { + if (strncmp(dirp->d_name, "video", 5) != 0) continue; + std::string tmp = "/dev/"; + tmp.append(dirp->d_name); + PyList_Append(out, PyString_FromString(tmp.c_str())); + } + closedir(dp); + + PyList_Sort(out); + return out; +} diff --git a/v4l2out.h b/v4l2out.h index 9689c55..709cf7c 100644 --- a/v4l2out.h +++ b/v4l2out.h @@ -24,6 +24,8 @@ PyObject *Video_out_manager_Send_frame(Video_out_manager *self, PyObject *args); PyObject *Video_out_manager_close(Video_out_manager *self, PyObject *args); +PyObject *Video_out_manager_list_devices(Video_out_manager *self); + // ****************************************************************** #endif //__V4L2OUT_H__ From 6a6ec234d0b16b5949fb624b768acb36f625deac Mon Sep 17 00:00:00 2001 From: Tim Sheerman-Chase Date: Fri, 15 Nov 2013 10:05:17 +0000 Subject: [PATCH 075/256] Refactor code and rename to libvideolive --- libvideolive.cpp | 417 +++++++++++++++ setup.py | 22 +- v4l2capture.cpp | 1336 ++++++++++++++-------------------------------- v4l2capture.h | 171 ++++++ 4 files changed, 1004 insertions(+), 942 deletions(-) create mode 100644 libvideolive.cpp 
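
The list_devices() helper added just above simply walks /dev for names starting with "video" and returns them sorted, and the refactor whose diffstat begins here exposes the same call, along with the rest of the capture API, from libvideolive.cpp. A sketch of the whole capture path as it looks after this refactor; the sizes, sleep interval and loop count are arbitrary, and get_frame() returns None until a decoded frame is available:

    import time
    import videolive

    cap = videolive.Device_manager()
    devices = cap.list_devices()                 # e.g. ['/dev/video0', '/dev/video1']
    dev = devices[0]

    cap.open(dev)
    cap.set_format(dev, 640, 480, "MJPEG")       # RGB24, YUV420 or MJPEG
    cap.start(dev)

    for _ in range(100):
        result = cap.get_frame(dev)              # None until a frame has been decoded
        if result is None:
            time.sleep(0.01)
            continue
        data, meta = result                      # (bytearray, dict) pair
        print("%dx%d %s seq=%d" % (meta["width"], meta["height"],
                                   meta["format"], meta["sequence"]))

    cap.stop(dev)
    cap.close(dev)
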
create mode 100644 v4l2capture.h diff --git a/libvideolive.cpp b/libvideolive.cpp new file mode 100644 index 0000000..929e771 --- /dev/null +++ b/libvideolive.cpp @@ -0,0 +1,417 @@ +// libvideolive +// Python extension to capture and stream video +// +// 2009, 2010, 2011 Fredrik Portstrom, released into the public domain +// 2011, Joakim Gebart +// 2013, Tim Sheerman-Chase +// See README for license + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include "pixfmt.h" +#include "v4l2capture.h" +#include "v4l2out.h" + +typedef struct { + PyObject_HEAD + int fd; + struct buffer *buffers; + int buffer_count; +} Video_device; + +class Device_manager_cl{ +public: + PyObject_HEAD + std::map *threadArgStore; +}; +typedef Device_manager_cl Device_manager; + +static PyObject *Device_manager_stop(Device_manager *self, PyObject *args); +static PyObject *Device_manager_close(Device_manager *self, PyObject *args); + +// ********************************************************************* + +PyObject *InsertHuffmanTable(PyObject *self, PyObject *args) +{ + /* This converts an MJPEG frame into a standard JPEG binary + MJPEG images omit the huffman table if the standard table + is used. If it is missing, this function adds the table + into the file structure. */ + + if(PyTuple_Size(args) < 1) + { + PyErr_BadArgument(); + PyErr_Format(PyExc_TypeError, "Function requires 1 argument"); + Py_RETURN_NONE; + } + + PyObject *inBuffer = PyTuple_GetItem(args, 0); + + if(!PyString_Check(inBuffer)) + { + PyErr_BadArgument(); + PyErr_Format(PyExc_TypeError, "Argument 1 must be a string."); + //PyObject* type = PyObject_Type(inBuffer); + //PyObject_Print(type, stdout, Py_PRINT_RAW); + //Py_CLEAR(type); + + Py_RETURN_NONE; + } + + unsigned char* inBufferPtr = (unsigned char*)PyString_AsString(inBuffer); + Py_ssize_t inBufferLen = PyString_Size(inBuffer); + std::string outBuffer; + + InsertHuffmanTableCTypes((unsigned char*)inBufferPtr, inBufferLen, outBuffer); + + PyObject *outBufferPy = PyString_FromStringAndSize(outBuffer.c_str(), outBuffer.length()); + + return outBufferPy; +} + +// ********************************************************************** + +static void Device_manager_dealloc(Device_manager *self) +{ + //Stop high level threads + for(std::map::iterator it = self->threadArgStore->begin(); + it != self->threadArgStore->end(); it++) + { + PyObject *args = PyTuple_New(1); + PyTuple_SetItem(args, 0, PyString_FromString(it->first.c_str())); + Device_manager_stop(self, args); + Py_DECREF(args); + } + + delete self->threadArgStore; + self->ob_type->tp_free((PyObject *)self); +} + +static int Device_manager_init(Device_manager *self, PyObject *args, + PyObject *kwargs) +{ + self->threadArgStore = new std::map; + return 0; +} + +static PyObject *Device_manager_open(Device_manager *self, PyObject *args) +{ + //Process arguments + const char *devarg = "/dev/video0"; + if(PyTuple_Size(args) >= 1) + { + PyObject *pydevarg = PyTuple_GetItem(args, 0); + devarg = PyString_AsString(pydevarg); + } + + //Check this device has not already been opened + std::map::iterator it = self->threadArgStore->find(devarg); + if(it!=self->threadArgStore->end()) + { + PyErr_Format(PyExc_RuntimeError, "Device already opened."); + Py_RETURN_NONE; + } + + pthread_t thread; + Device_manager_Worker_thread_args *threadArgs = new Device_manager_Worker_thread_args(devarg); + (*self->threadArgStore)[devarg] = threadArgs; + pthread_create(&thread, NULL, Device_manager_Worker_thread, 
threadArgs); + + threadArgs->OpenDevice(); + + Py_RETURN_NONE; +} + + +static PyObject *Device_manager_set_format(Device_manager *self, PyObject *args) +{ + int size_x; + int size_y; + const char *fmt = NULL; + const char *devarg = NULL; + + if(!PyArg_ParseTuple(args, "sii|s", &devarg, &size_x, &size_y, &fmt)) + { + Py_RETURN_NONE; + } + + //Check this device is valid + std::map::iterator it = self->threadArgStore->find(devarg); + if(it==self->threadArgStore->end()) + { + PyErr_Format(PyExc_RuntimeError, "Device already not ready."); + Py_RETURN_NONE; + } + + class Device_manager_Worker_thread_args *threadArgs = (*self->threadArgStore)[devarg]; + threadArgs->SetFormat(fmt, size_x, size_y); + + Py_RETURN_NONE; +} + +static PyObject *Device_manager_Start(Device_manager *self, PyObject *args) +{ + + //Process arguments + const char *devarg = "/dev/video0"; + if(PyTuple_Size(args) >= 1) + { + PyObject *pydevarg = PyTuple_GetItem(args, 0); + devarg = PyString_AsString(pydevarg); + } + + long buffer_count = 10; + if(PyTuple_Size(args) >= 4) + { + PyObject *pybufferarg = PyTuple_GetItem(args, 4); + buffer_count = PyInt_AsLong(pybufferarg); + } + + //Check this device is valid + std::map::iterator it = self->threadArgStore->find(devarg); + if(it==self->threadArgStore->end()) + { + PyErr_Format(PyExc_RuntimeError, "Device already not ready."); + Py_RETURN_NONE; + } + + class Device_manager_Worker_thread_args *threadArgs = (*self->threadArgStore)[devarg]; + threadArgs->StartDevice(buffer_count); + + Py_RETURN_NONE; +} + +static PyObject *Device_manager_Get_frame(Device_manager *self, PyObject *args) +{ + + //Process arguments + const char *devarg = "/dev/video0"; + if(PyTuple_Size(args) >= 1) + { + PyObject *pydevarg = PyTuple_GetItem(args, 0); + devarg = PyString_AsString(pydevarg); + } + + //Check this device is valid + std::map::iterator it = self->threadArgStore->find(devarg); + if(it==self->threadArgStore->end()) + { + PyErr_Format(PyExc_RuntimeError, "Device already not ready."); + Py_RETURN_NONE; + } + + class Device_manager_Worker_thread_args *threadArgs = (*self->threadArgStore)[devarg]; + unsigned char *buffOut = NULL; + class FrameMetaData metaOut; + + int ok = threadArgs->GetFrame(&buffOut, &metaOut); + if(ok && buffOut != NULL) + { + //Format output to python + PyObject *pymeta = PyDict_New(); + PyDict_SetItemString(pymeta, "width", PyInt_FromLong(metaOut.width)); + PyDict_SetItemString(pymeta, "height", PyInt_FromLong(metaOut.height)); + PyDict_SetItemString(pymeta, "format", PyString_FromString(metaOut.fmt.c_str())); + PyDict_SetItemString(pymeta, "sequence", PyInt_FromLong(metaOut.sequence)); + PyDict_SetItemString(pymeta, "tv_sec", PyInt_FromLong(metaOut.tv_sec)); + PyDict_SetItemString(pymeta, "tv_usec", PyInt_FromLong(metaOut.tv_usec)); + + PyObject *out = PyTuple_New(2); + PyTuple_SetItem(out, 0, PyByteArray_FromStringAndSize((char *)buffOut, metaOut.buffLen)); + PyTuple_SetItem(out, 1, pymeta); + + delete [] buffOut; + return out; + } + + Py_RETURN_NONE; +} + +static PyObject *Device_manager_stop(Device_manager *self, PyObject *args) +{ + //Process arguments + const char *devarg = "/dev/video0"; + if(PyTuple_Size(args) >= 1) + { + PyObject *pydevarg = PyTuple_GetItem(args, 0); + devarg = PyString_AsString(pydevarg); + } + + //Check this device is valid + std::map::iterator it = self->threadArgStore->find(devarg); + if(it==self->threadArgStore->end()) + { + PyErr_Format(PyExc_RuntimeError, "Device already not ready."); + Py_RETURN_NONE; + } + + class 
Device_manager_Worker_thread_args *threadArgs = (*self->threadArgStore)[devarg]; + threadArgs->StopDevice(); + + Py_RETURN_NONE; +} + +static PyObject *Device_manager_close(Device_manager *self, PyObject *args) +{ + //Process arguments + const char *devarg = "/dev/video0"; + if(PyTuple_Size(args) >= 1) + { + PyObject *pydevarg = PyTuple_GetItem(args, 0); + devarg = PyString_AsString(pydevarg); + } + + //Check this device is valid + std::map::iterator it = self->threadArgStore->find(devarg); + if(it==self->threadArgStore->end()) + { + PyErr_Format(PyExc_RuntimeError, "Device already not ready."); + Py_RETURN_NONE; + } + + class Device_manager_Worker_thread_args *threadArgs = (*self->threadArgStore)[devarg]; + threadArgs->CloseDevice(); + + //Stop worker thread + threadArgs->Stop(); + + //Release memeory + threadArgs->WaitForStop(); + delete threadArgs; + self->threadArgStore->erase(devarg); + + Py_RETURN_NONE; +} + +static PyObject *Device_manager_list_devices(Device_manager *self) +{ + PyObject *out = PyList_New(0); + const char dir[] = "/dev"; + DIR *dp; + struct dirent *dirp; + if((dp = opendir(dir)) == NULL) { + printf("Error(%d) opening %s\n", errno, dir); + Py_RETURN_NONE; + } + + while ((dirp = readdir(dp)) != NULL) { + if (strncmp(dirp->d_name, "video", 5) != 0) continue; + std::string tmp = "/dev/"; + tmp.append(dirp->d_name); + PyList_Append(out, PyString_FromString(tmp.c_str())); + } + closedir(dp); + + PyList_Sort(out); + return out; +} + +// ********************************************************************* + +static PyMethodDef Device_manager_methods[] = { + {"open", (PyCFunction)Device_manager_open, METH_VARARGS, + "open(dev = '\\dev\\video0')\n\n" + "Open video capture."}, + {"set_format", (PyCFunction)Device_manager_set_format, METH_VARARGS, + "set_format(dev, size_x, size_y, pixel_format='RGB24') -> size_x, size_y\n\n" + "Request the video device to set image size and format. The device may " + "choose another size than requested and will return its choice. The " + "pixel format may be either RGB24, YUV420 or MJPEG."}, + {"start", (PyCFunction)Device_manager_Start, METH_VARARGS, + "start(dev = '\\dev\\video0', reqSize=(640, 480), reqFps = 30, fmt = 'MJPEG\', buffer_count = 10)\n\n" + "Start video capture."}, + {"get_frame", (PyCFunction)Device_manager_Get_frame, METH_VARARGS, + "start(dev = '\\dev\\video0'\n\n" + "Get video frame."}, + {"stop", (PyCFunction)Device_manager_stop, METH_VARARGS, + "stop(dev = '\\dev\\video0')\n\n" + "Stop video capture."}, + {"close", (PyCFunction)Device_manager_close, METH_VARARGS, + "close(dev = '\\dev\\video0')\n\n" + "Close video device. Subsequent calls to other methods will fail."}, + {"list_devices", (PyCFunction)Device_manager_list_devices, METH_NOARGS, + "list_devices()\n\n" + "List available capture devices."}, + {NULL} +}; + +static PyTypeObject Device_manager_type = { + PyObject_HEAD_INIT(NULL) + 0, "v4l2capture.Device_manager", sizeof(Device_manager), 0, + (destructor)Device_manager_dealloc, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, Py_TPFLAGS_DEFAULT, "Device_manager(path)\n\nOpens the video device at " + "the given path and returns an object that can capture images. 
The " + "constructor and all methods except close may raise IOError.", 0, 0, 0, + 0, 0, 0, Device_manager_methods, 0, 0, 0, 0, 0, 0, 0, + (initproc)Device_manager_init +}; + +static PyMethodDef Video_out_manager_methods[] = { + {"open", (PyCFunction)Video_out_manager_open, METH_VARARGS, + "open(dev = '\\dev\\video0', pixel_format, width, height)\n\n" + "Open video output."}, + {"send_frame", (PyCFunction)Video_out_manager_Send_frame, METH_VARARGS, + "send_frame(dev = '\\dev\\video0', img, pixel_format, width, height)\n\n" + "Send frame to video stream output."}, + {"close", (PyCFunction)Video_out_manager_close, METH_VARARGS, + "close(dev = '\\dev\\video0')\n\n" + "Close video device. Subsequent calls to other methods will fail."}, + {"list_devices", (PyCFunction)Video_out_manager_list_devices, METH_NOARGS, + "list_devices()\n\n" + "List available capture devices."}, + {NULL} +}; + +static PyTypeObject Video_out_manager_type = { + PyObject_HEAD_INIT(NULL) + 0, "v4l2capture.Video_out_manager", sizeof(Video_out_manager), 0, + (destructor)Video_out_manager_dealloc, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, Py_TPFLAGS_DEFAULT, "Video_out_manager(path)\n\nOpens the video device at " + "the given path and returns an object that can capture images. The " + "constructor and all methods except close may raise IOError.", 0, 0, 0, + 0, 0, 0, Video_out_manager_methods, 0, 0, 0, 0, 0, 0, 0, + (initproc)Video_out_manager_init +}; + +// ********************************************************************* + +static PyMethodDef module_methods[] = { + { "InsertHuffmanTable", (PyCFunction)InsertHuffmanTable, METH_VARARGS, NULL }, + { NULL, NULL, 0, NULL } +}; + +PyMODINIT_FUNC initv4l2capture(void) +{ + Device_manager_type.tp_new = PyType_GenericNew; + Video_out_manager_type.tp_new = PyType_GenericNew; + + if(PyType_Ready(&Device_manager_type) < 0) + { + return; + } + if(PyType_Ready(&Video_out_manager_type) < 0) + { + return; + } + + PyObject *module = Py_InitModule3("videolive", module_methods, + "Capture and stream video."); + + if(!module) + { + return; + } + + Py_INCREF(&Device_manager_type); + PyModule_AddObject(module, "Device_manager", (PyObject *)&Device_manager_type); + PyModule_AddObject(module, "Video_out_manager", (PyObject *)&Video_out_manager_type); + +} diff --git a/setup.py b/setup.py index c21a84a..b286d14 100755 --- a/setup.py +++ b/setup.py @@ -12,18 +12,18 @@ from distutils.core import Extension, setup setup( - name = "v4l2capture", - version = "1.4", - author = "Fredrik Portstrom, Tim Sheerman-Chase", - author_email = "fredrik@jemla.se", - url = "http://fredrik.jemla.eu/v4l2capture", - description = "Capture video with video4linux2", - long_description = "python-v4l2capture is a slim and easy to use Python " - "extension for capturing video with video4linux2.", - license = "Public Domain", + name = "videolive", + version = "1.0", + author = "Tim Sheerman-Chase", + author_email = "info@kinatomic", + url = "http://www.kinatomic.com", + description = "Capture and stream video", + long_description = "Capture and stream video in python", + license = "GPL v2 or later", classifiers = [ - "License :: Public Domain", + "License :: GPL", "Programming Language :: C++"], ext_modules = [ - Extension("v4l2capture", ["v4l2capture.cpp", "v4l2out.cpp", "pixfmt.cpp"], libraries = ["v4l2", "pthread", "jpeg"])]) + Extension("videolive", ["v4l2capture.cpp", "v4l2out.cpp", "pixfmt.cpp", "libvideolive.cpp"], + libraries = ["v4l2", "pthread", "jpeg"])]) diff --git a/v4l2capture.cpp b/v4l2capture.cpp index 
3f9da57..8c4fbb0 100644 --- a/v4l2capture.cpp +++ b/v4l2capture.cpp @@ -1,14 +1,8 @@ -// python-v4l2capture -// Python extension to capture video with video4linux2 -// -// 2009, 2010, 2011 Fredrik Portstrom, released into the public domain -// 2011, Joakim Gebart -// 2013, Tim Sheerman-Chase -// See README for license + +#include "v4l2capture.h" #define USE_LIBV4L -#include #include #include #include @@ -16,11 +10,11 @@ #include #include #include -#include #include -#include #include -#include "v4l2out.h" +#include +#include + #include "pixfmt.h" #ifdef USE_LIBV4L @@ -41,53 +35,7 @@ Py_RETURN_NONE; \ } -struct buffer { - void *start; - size_t length; -}; - -typedef struct { - PyObject_HEAD - int fd; - struct buffer *buffers; - int buffer_count; -} Video_device; - -class Device_manager_cl{ -public: - PyObject_HEAD - std::map *threadArgStore; -}; -typedef Device_manager_cl Device_manager; - -static PyObject *Device_manager_stop(Device_manager *self, PyObject *args); -static PyObject *Device_manager_close(Device_manager *self, PyObject *args); - -struct capability { - int id; - const char *name; -}; - -/*static struct capability capabilities[] = { - { V4L2_CAP_ASYNCIO, "asyncio" }, - { V4L2_CAP_AUDIO, "audio" }, - { V4L2_CAP_HW_FREQ_SEEK, "hw_freq_seek" }, - { V4L2_CAP_RADIO, "radio" }, - { V4L2_CAP_RDS_CAPTURE, "rds_capture" }, - { V4L2_CAP_READWRITE, "readwrite" }, - { V4L2_CAP_SLICED_VBI_CAPTURE, "sliced_vbi_capture" }, - { V4L2_CAP_SLICED_VBI_OUTPUT, "sliced_vbi_output" }, - { V4L2_CAP_STREAMING, "streaming" }, - { V4L2_CAP_TUNER, "tuner" }, - { V4L2_CAP_VBI_CAPTURE, "vbi_capture" }, - { V4L2_CAP_VBI_OUTPUT, "vbi_output" }, - { V4L2_CAP_VIDEO_CAPTURE, "video_capture" }, - { V4L2_CAP_VIDEO_OUTPUT, "video_output" }, - { V4L2_CAP_VIDEO_OUTPUT_OVERLAY, "video_output_overlay" }, - { V4L2_CAP_VIDEO_OVERLAY, "video_overlay" } -};*/ - -static int my_ioctl(int fd, int request, void *arg, int utimeout = -1) +int my_ioctl(int fd, int request, void *arg, int utimeout = -1) { // Retry ioctl until it returns without being interrupted. @@ -134,1014 +82,540 @@ static int my_ioctl(int fd, int request, void *arg, int utimeout = -1) } } -static PyObject *InsertHuffmanTable(PyObject *self, PyObject *args) +// *************************************************************************** + +Device_manager_Worker_thread_args::Device_manager_Worker_thread_args(const char *devNameIn) { - /* This converts an MJPEG frame into a standard JPEG binary - MJPEG images omit the huffman table if the standard table - is used. If it is missing, this function adds the table - into the file structure. 
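
As the comment above says, MJPEG frames normally omit the standard Huffman tables, so a captured MJPEG frame only becomes a self-contained JPEG once InsertHuffmanTable has put the tables back. A usage sketch, assuming mjpeg_bytes already holds the raw bytes of one MJPEG frame obtained elsewhere:

    import videolive

    # InsertHuffmanTable takes and returns a byte string; 'mjpeg_bytes' is a
    # placeholder for one raw MJPEG frame, not something the library provides.
    jpeg = videolive.InsertHuffmanTable(mjpeg_bytes)
    open("frame.jpg", "wb").write(jpeg)
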
*/ + stop = 0; + stopped = 1; + deviceStarted = 0; + this->devName = devNameIn; + pthread_mutex_init(&lock, NULL); + buffer_counts = 10; + buffers = NULL; + stopDeviceFlag = 0; + closeDeviceFlag = 0; + frameWidth = 0; + frameHeight = 0; + decodedFrameBuffMaxSize = 10; + verbose = 0; + targetFmt = "RGB24"; +} - if(PyTuple_Size(args) < 1) +Device_manager_Worker_thread_args::~Device_manager_Worker_thread_args() +{ + if(deviceStarted) { - PyErr_BadArgument(); - PyErr_Format(PyExc_TypeError, "Function requires 1 argument"); - Py_RETURN_NONE; + this->StopDeviceInternal(); } - PyObject *inBuffer = PyTuple_GetItem(args, 0); - - if(!PyString_Check(inBuffer)) + if(fd!=-1) { - PyErr_BadArgument(); - PyErr_Format(PyExc_TypeError, "Argument 1 must be a string."); - //PyObject* type = PyObject_Type(inBuffer); - //PyObject_Print(type, stdout, Py_PRINT_RAW); - //Py_CLEAR(type); - - Py_RETURN_NONE; + this->CloseDeviceInternal(); } - unsigned char* inBufferPtr = (unsigned char*)PyString_AsString(inBuffer); - Py_ssize_t inBufferLen = PyString_Size(inBuffer); - std::string outBuffer; - - InsertHuffmanTableCTypes((unsigned char*)inBufferPtr, inBufferLen, outBuffer); + if(buffers) delete [] buffers; + this->buffers = NULL; - PyObject *outBufferPy = PyString_FromStringAndSize(outBuffer.c_str(), outBuffer.length()); + for(unsigned int i=0; idecodedFrameBuff[i]; + } + this->decodedFrameBuff.clear(); - return outBufferPy; + pthread_mutex_destroy(&lock); } - -// ********************************************************************** - -class SetFormatParams +void Device_manager_Worker_thread_args::Stop() { -public: - std::string fmt; - int width, height; - - SetFormatParams() - { - width = 0; - height = 0; - } + pthread_mutex_lock(&this->lock); + this->stop = 1; + pthread_mutex_unlock(&this->lock); +} - SetFormatParams(const SetFormatParams &in) +void Device_manager_Worker_thread_args::WaitForStop() +{ + while(1) { - SetFormatParams::operator=(in); - } + pthread_mutex_lock(&this->lock); + int s = this->stopped; + pthread_mutex_unlock(&this->lock); - const SetFormatParams &operator=(const SetFormatParams &in) - { - width = in.width; - height = in.height; - fmt = in.fmt; - return *this; + if(s) return; + usleep(10000); } -}; +} -class FrameMetaData +void Device_manager_Worker_thread_args::OpenDevice() { -public: - std::string fmt; - int width; - int height; - unsigned buffLen; - unsigned long sequence; - unsigned long tv_sec; - unsigned long tv_usec; - - FrameMetaData() - { - width = 0; - height = 0; - buffLen = 0; - sequence = 0; - tv_sec = 0; - tv_usec = 0; - } + pthread_mutex_lock(&this->lock); + this->openDeviceFlag.push_back(this->devName.c_str()); + pthread_mutex_unlock(&this->lock); +} - FrameMetaData(const FrameMetaData &in) - { - FrameMetaData::operator=(in); - } +void Device_manager_Worker_thread_args::SetFormat(const char *fmt, int width, int height) +{ + class SetFormatParams params; + params.fmt = fmt; + params.width = width; + params.height = height; + + pthread_mutex_lock(&this->lock); + this->setFormatFlags.push_back(params); + pthread_mutex_unlock(&this->lock); +} - const FrameMetaData &operator=(const FrameMetaData &in) - { - width = in.width; - height = in.height; - fmt = in.fmt; - buffLen = in.buffLen; - sequence = in.sequence; - tv_sec = in.tv_sec; - tv_usec = in.tv_usec; - return *this; - } +void Device_manager_Worker_thread_args::StartDevice(int buffer_count) +{ + pthread_mutex_lock(&this->lock); + this->startDeviceFlag.push_back(buffer_count); + pthread_mutex_unlock(&this->lock); +} -}; +void 
Device_manager_Worker_thread_args::StopDevice() +{ + pthread_mutex_lock(&this->lock); + this->stopDeviceFlag = 1; + pthread_mutex_unlock(&this->lock); +} +void Device_manager_Worker_thread_args::CloseDevice() +{ + pthread_mutex_lock(&this->lock); + this->closeDeviceFlag = 1; + pthread_mutex_unlock(&this->lock); +} -class Device_manager_Worker_thread_args +int Device_manager_Worker_thread_args::GetFrame(unsigned char **buffOut, class FrameMetaData *metaOut) { -public: - Device_manager *self; - std::string devName; - int stop; - int stopped; - pthread_mutex_t lock; - std::vector openDeviceFlag; - std::vector startDeviceFlag; - std::vector setFormatFlags; - int stopDeviceFlag; - int closeDeviceFlag; - int deviceStarted; - int fd; - struct buffer *buffers; - int frameWidth, frameHeight; - int buffer_counts; - std::string pxFmt; - int verbose; - std::string targetFmt; - - std::vector decodedFrameBuff; - std::vector decodedFrameMetaBuff; - unsigned decodedFrameBuffMaxSize; - - Device_manager_Worker_thread_args(const char *devNameIn) + pthread_mutex_lock(&this->lock); + if(this->decodedFrameBuff.size()==0) { - stop = 0; - stopped = 1; - deviceStarted = 0; - this->devName = devNameIn; - pthread_mutex_init(&lock, NULL); - buffer_counts = 10; - buffers = NULL; - stopDeviceFlag = 0; - closeDeviceFlag = 0; - frameWidth = 0; - frameHeight = 0; - decodedFrameBuffMaxSize = 10; - verbose = 0; - targetFmt = "RGB24"; + //No frame found + *buffOut = NULL; + metaOut = NULL; + pthread_mutex_unlock(&this->lock); + return 0; } - virtual ~Device_manager_Worker_thread_args() - { - if(deviceStarted) - { - this->StopDeviceInternal(); - } - - if(fd!=-1) - { - this->CloseDeviceInternal(); - } - - if(buffers) delete [] buffers; - this->buffers = NULL; - - for(unsigned int i=0; idecodedFrameBuff[i]; - } - this->decodedFrameBuff.clear(); - - pthread_mutex_destroy(&lock); - } + //Return frame + *buffOut = this->decodedFrameBuff[0]; + *metaOut = this->decodedFrameMetaBuff[0]; + this->decodedFrameBuff.erase(this->decodedFrameBuff.begin()); + this->decodedFrameMetaBuff.erase(this->decodedFrameMetaBuff.begin()); + pthread_mutex_unlock(&this->lock); + return 1; +} - void Stop() - { - pthread_mutex_lock(&this->lock); - this->stop = 1; - pthread_mutex_unlock(&this->lock); - } +int Device_manager_Worker_thread_args::ReadFrame() +{ + if(this->fd<0) + throw std::runtime_error("File not open"); - void WaitForStop() - { - while(1) - { - pthread_mutex_lock(&this->lock); - int s = this->stopped; - pthread_mutex_unlock(&this->lock); + if(this->buffers == NULL) + throw std::runtime_error("Buffers have not been created"); - if(s) return; - usleep(10000); - } - } + struct v4l2_buffer buffer; + buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + buffer.memory = V4L2_MEMORY_MMAP; - void OpenDevice() + if(my_ioctl(this->fd, VIDIOC_DQBUF, &buffer, 10000)) { - pthread_mutex_lock(&this->lock); - this->openDeviceFlag.push_back(this->devName.c_str()); - pthread_mutex_unlock(&this->lock); + return 0; } - void SetFormat(const char *fmt, int width, int height) - { - class SetFormatParams params; - params.fmt = fmt; - params.width = width; - params.height = height; - - pthread_mutex_lock(&this->lock); - this->setFormatFlags.push_back(params); - pthread_mutex_unlock(&this->lock); - } + unsigned char *rgbBuff = NULL; + unsigned rgbBuffLen = 0; + int ok = DecodeFrame((const unsigned char*)this->buffers[buffer.index].start, buffer.bytesused, + this->pxFmt.c_str(), + this->frameWidth, + this->frameHeight, + this->targetFmt.c_str(), &rgbBuff, &rgbBuffLen); - void 
StartDevice(int buffer_count) + //Return a frame, decoded or not + pthread_mutex_lock(&this->lock); + + class FrameMetaData meta; + meta.width = this->frameWidth; + meta.height = this->frameHeight; + if(ok && rgbBuff != NULL) { - pthread_mutex_lock(&this->lock); - this->startDeviceFlag.push_back(buffer_count); - pthread_mutex_unlock(&this->lock); + meta.fmt = this->targetFmt; + meta.buffLen = rgbBuffLen; + this->decodedFrameBuff.push_back(rgbBuff); } - - void StopDevice() + else { - pthread_mutex_lock(&this->lock); - this->stopDeviceFlag = 1; - pthread_mutex_unlock(&this->lock); + //Make a copy of un-decodable buffer to return + unsigned char* buffOut = new unsigned char[buffer.bytesused]; + memcpy(buffOut, this->buffers[buffer.index].start, buffer.bytesused); + meta.fmt = this->pxFmt; + meta.buffLen = buffer.bytesused; + this->decodedFrameBuff.push_back(buffOut); } + meta.sequence = buffer.sequence; + meta.tv_sec = buffer.timestamp.tv_sec; + meta.tv_usec = buffer.timestamp.tv_usec; - void CloseDevice() + this->decodedFrameMetaBuff.push_back(meta); + while(this->decodedFrameBuff.size() > this->decodedFrameBuffMaxSize) { - pthread_mutex_lock(&this->lock); - this->closeDeviceFlag = 1; - pthread_mutex_unlock(&this->lock); - } - - int GetFrame(unsigned char **buffOut, class FrameMetaData *metaOut) - { - pthread_mutex_lock(&this->lock); - if(this->decodedFrameBuff.size()==0) - { - //No frame found - *buffOut = NULL; - metaOut = NULL; - pthread_mutex_unlock(&this->lock); - return 0; - } - - //Return frame - *buffOut = this->decodedFrameBuff[0]; - *metaOut = this->decodedFrameMetaBuff[0]; this->decodedFrameBuff.erase(this->decodedFrameBuff.begin()); this->decodedFrameMetaBuff.erase(this->decodedFrameMetaBuff.begin()); - pthread_mutex_unlock(&this->lock); - return 1; } + pthread_mutex_unlock(&this->lock); -protected: - int ReadFrame() + //Queue buffer for next frame + if(my_ioctl(this->fd, VIDIOC_QBUF, &buffer)) { - if(this->fd<0) - throw std::runtime_error("File not open"); - - if(this->buffers == NULL) - throw std::runtime_error("Buffers have not been created"); - - struct v4l2_buffer buffer; - buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; - buffer.memory = V4L2_MEMORY_MMAP; - - if(my_ioctl(this->fd, VIDIOC_DQBUF, &buffer, 10000)) - { - return 0; - } - - unsigned char *rgbBuff = NULL; - unsigned rgbBuffLen = 0; - int ok = DecodeFrame((const unsigned char*)this->buffers[buffer.index].start, buffer.bytesused, - this->pxFmt.c_str(), - this->frameWidth, - this->frameHeight, - this->targetFmt.c_str(), &rgbBuff, &rgbBuffLen); - - //Return a frame, decoded or not - pthread_mutex_lock(&this->lock); - - class FrameMetaData meta; - meta.width = this->frameWidth; - meta.height = this->frameHeight; - if(ok && rgbBuff != NULL) - { - meta.fmt = this->targetFmt; - meta.buffLen = rgbBuffLen; - this->decodedFrameBuff.push_back(rgbBuff); - } - else - { - //Make a copy of un-decodable buffer to return - unsigned char* buffOut = new unsigned char[buffer.bytesused]; - memcpy(buffOut, this->buffers[buffer.index].start, buffer.bytesused); - meta.fmt = this->pxFmt; - meta.buffLen = buffer.bytesused; - this->decodedFrameBuff.push_back(buffOut); - } - meta.sequence = buffer.sequence; - meta.tv_sec = buffer.timestamp.tv_sec; - meta.tv_usec = buffer.timestamp.tv_usec; - - this->decodedFrameMetaBuff.push_back(meta); - while(this->decodedFrameBuff.size() > this->decodedFrameBuffMaxSize) - { - this->decodedFrameBuff.erase(this->decodedFrameBuff.begin()); - this->decodedFrameMetaBuff.erase(this->decodedFrameMetaBuff.begin()); - } 
- pthread_mutex_unlock(&this->lock); - - //Queue buffer for next frame - if(my_ioctl(this->fd, VIDIOC_QBUF, &buffer)) - { - throw std::runtime_error("VIDIOC_QBUF failed"); - } - - return 1; + throw std::runtime_error("VIDIOC_QBUF failed"); } - int OpenDeviceInternal() - { - if(verbose) printf("OpenDeviceInternal\n"); - //Open the video device. - this->fd = v4l2_open(this->devName.c_str(), O_RDWR | O_NONBLOCK); + return 1; +} - if(fd < 0) - { - throw std::runtime_error("Error opening device"); - } +int Device_manager_Worker_thread_args::OpenDeviceInternal() +{ + if(verbose) printf("OpenDeviceInternal\n"); + //Open the video device. + this->fd = v4l2_open(this->devName.c_str(), O_RDWR | O_NONBLOCK); - this->deviceStarted = 0; - if(verbose) printf("Done opening\n"); - return 1; + if(fd < 0) + { + throw std::runtime_error("Error opening device"); } - int SetFormatInternal(class SetFormatParams &args) - { - if(verbose) printf("SetFormatInternal\n"); - //int size_x, int size_y, const char *fmt; + this->deviceStarted = 0; + if(verbose) printf("Done opening\n"); + return 1; +} - struct v4l2_format format; - format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; - format.fmt.pix.width = args.width; - format.fmt.pix.height = args.height; +int Device_manager_Worker_thread_args::SetFormatInternal(class SetFormatParams &args) +{ + if(verbose) printf("SetFormatInternal\n"); + //int size_x, int size_y, const char *fmt; + + struct v4l2_format format; + format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + format.fmt.pix.width = args.width; + format.fmt.pix.height = args.height; + format.fmt.pix.pixelformat = V4L2_PIX_FMT_RGB24; + + if(strcmp(args.fmt.c_str(), "MJPEG")==0) + format.fmt.pix.pixelformat = V4L2_PIX_FMT_MJPEG; + if(strcmp(args.fmt.c_str(), "RGB24")==0) format.fmt.pix.pixelformat = V4L2_PIX_FMT_RGB24; + if(strcmp(args.fmt.c_str(), "YUV420")==0) + format.fmt.pix.pixelformat = V4L2_PIX_FMT_YUV420; + if(strcmp(args.fmt.c_str(), "YVU420")==0) + format.fmt.pix.pixelformat = V4L2_PIX_FMT_YVU420; + if(strcmp(args.fmt.c_str(), "YUYV")==0) + format.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV; - if(strcmp(args.fmt.c_str(), "MJPEG")==0) - format.fmt.pix.pixelformat = V4L2_PIX_FMT_MJPEG; - if(strcmp(args.fmt.c_str(), "RGB24")==0) - format.fmt.pix.pixelformat = V4L2_PIX_FMT_RGB24; - if(strcmp(args.fmt.c_str(), "YUV420")==0) - format.fmt.pix.pixelformat = V4L2_PIX_FMT_YUV420; - if(strcmp(args.fmt.c_str(), "YVU420")==0) - format.fmt.pix.pixelformat = V4L2_PIX_FMT_YVU420; - if(strcmp(args.fmt.c_str(), "YUYV")==0) - format.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV; - - format.fmt.pix.field = V4L2_FIELD_NONE; - format.fmt.pix.bytesperline = 0; - - if(my_ioctl(this->fd, VIDIOC_S_FMT, &format)) - { - return 0; - } + format.fmt.pix.field = V4L2_FIELD_NONE; + format.fmt.pix.bytesperline = 0; - //Store pixel format for decoding usage later - //this->pxFmt = args.fmt; - //this->frameWidth = args.width; - //this->frameHeight = args.height; - this->GetFormatInternal(); - - return 1; - } - - int GetFormatInternal() + if(my_ioctl(this->fd, VIDIOC_S_FMT, &format)) { - struct v4l2_format format; - format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; - if(my_ioctl(this->fd, VIDIOC_G_FMT, &format)) - { - return 0; - } - - this->frameWidth = format.fmt.pix.width; - this->frameHeight = format.fmt.pix.height; - - switch(format.fmt.pix.pixelformat) - { - case V4L2_PIX_FMT_MJPEG: - this->pxFmt = "MJPEG"; - break; - case V4L2_PIX_FMT_RGB24: - this->pxFmt = "RGB24"; - break; - case V4L2_PIX_FMT_YUV420: - this->pxFmt = "YUV420"; - break; - case V4L2_PIX_FMT_YVU420: - 
this->pxFmt = "YVU420"; - break; - case V4L2_PIX_FMT_YUYV: - this->pxFmt = "YUYV"; - break; - default: - this->pxFmt = "Unknown "; - std::ostringstream oss; - oss << format.fmt.pix.pixelformat; - this->pxFmt.append(oss.str()); - - break; - } - - if(verbose) printf("Current format %s %i %i\n", this->pxFmt.c_str(), this->frameWidth, this->frameHeight); - return 1; + return 0; } - int StartDeviceInternal(int buffer_count = 10) - { - if(verbose) printf("StartDeviceInternal\n"); - //Check this device has not already been start - if(this->fd==-1) - { - throw std::runtime_error("Device not open"); - } - - //Set other parameters for capture - //TODO - - /* - //Query current pixel format - self.size_x, self.size_y, self.pixelFmt = self.video.get_format() - - //Set target frames per second - self.fps = self.video.set_fps(reqFps) - */ - - // Create a buffer to store image data in. This must be done before - // calling 'start' if v4l2capture is compiled with libv4l2. Otherwise - // raises IOError. - - if(this->pxFmt.length()==0) - { - //Get current pixel format - //TODO - int ret = GetFormatInternal(); - if(!ret) throw std::runtime_error("Could not determine image format"); - } - - struct v4l2_requestbuffers reqbuf; - reqbuf.count = buffer_count; - reqbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; - reqbuf.memory = V4L2_MEMORY_MMAP; - - if(my_ioctl(this->fd, VIDIOC_REQBUFS, &reqbuf)) - { - throw std::runtime_error("VIDIOC_REQBUFS failed"); - } + //Store pixel format for decoding usage later + //this->pxFmt = args.fmt; + //this->frameWidth = args.width; + //this->frameHeight = args.height; + this->GetFormatInternal(); - if(!reqbuf.count) - { - throw std::runtime_error("Not enough buffer memory"); - } + return 1; +} - this->buffers = new struct buffer [reqbuf.count]; +int Device_manager_Worker_thread_args::GetFormatInternal() +{ + struct v4l2_format format; + format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + if(my_ioctl(this->fd, VIDIOC_G_FMT, &format)) + { + return 0; + } + + this->frameWidth = format.fmt.pix.width; + this->frameHeight = format.fmt.pix.height; + + switch(format.fmt.pix.pixelformat) + { + case V4L2_PIX_FMT_MJPEG: + this->pxFmt = "MJPEG"; + break; + case V4L2_PIX_FMT_RGB24: + this->pxFmt = "RGB24"; + break; + case V4L2_PIX_FMT_YUV420: + this->pxFmt = "YUV420"; + break; + case V4L2_PIX_FMT_YVU420: + this->pxFmt = "YVU420"; + break; + case V4L2_PIX_FMT_YUYV: + this->pxFmt = "YUYV"; + break; + default: + this->pxFmt = "Unknown "; + std::ostringstream oss; + oss << format.fmt.pix.pixelformat; + this->pxFmt.append(oss.str()); + + break; + } + + if(verbose) printf("Current format %s %i %i\n", this->pxFmt.c_str(), this->frameWidth, this->frameHeight); + return 1; +} - if(this->buffers == NULL) - { - throw std::runtime_error("Failed to allocate buffer memory"); - } +int Device_manager_Worker_thread_args::StartDeviceInternal(int buffer_count = 10) +{ + if(verbose) printf("StartDeviceInternal\n"); + //Check this device has not already been start + if(this->fd==-1) + { + throw std::runtime_error("Device not open"); + } - for(unsigned int i = 0; i < reqbuf.count; i++) - { - struct v4l2_buffer buffer; - buffer.index = i; - buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; - buffer.memory = V4L2_MEMORY_MMAP; - - if(my_ioctl(fd, VIDIOC_QUERYBUF, &buffer)) - { - throw std::runtime_error("VIDIOC_QUERYBUF failed"); - } - - this->buffers[i].length = buffer.length; - this->buffers[i].start = v4l2_mmap(NULL, buffer.length, - PROT_READ | PROT_WRITE, MAP_SHARED, fd, buffer.m.offset); - - if(this->buffers[i].start == MAP_FAILED) 
- { - throw std::runtime_error("v4l2_mmap failed"); - } - } + //Set other parameters for capture + //TODO - this->buffer_counts = reqbuf.count; + /* + //Query current pixel format + self.size_x, self.size_y, self.pixelFmt = self.video.get_format() - // Send the buffer to the device. Some devices require this to be done - // before calling 'start'. + //Set target frames per second + self.fps = self.video.set_fps(reqFps) + */ - for(int i = 0; i < buffer_count; i++) - { - struct v4l2_buffer buffer; - buffer.index = i; - buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; - buffer.memory = V4L2_MEMORY_MMAP; - - if(my_ioctl(fd, VIDIOC_QBUF, &buffer)) - { - //This may fail with some devices but does not seem to be harmful. - } - } + // Create a buffer to store image data in. This must be done before + // calling 'start' if v4l2capture is compiled with libv4l2. Otherwise + // raises IOError. - // Start the device. This lights the LED if it's a camera that has one. - enum v4l2_buf_type type; - type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + if(this->pxFmt.length()==0) + { + //Get current pixel format + //TODO + int ret = GetFormatInternal(); + if(!ret) throw std::runtime_error("Could not determine image format"); + } - if(my_ioctl(fd, VIDIOC_STREAMON, &type)) - { - throw std::runtime_error("VIDIOC_STREAMON failed"); - } + struct v4l2_requestbuffers reqbuf; + reqbuf.count = buffer_count; + reqbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + reqbuf.memory = V4L2_MEMORY_MMAP; - this->deviceStarted = 1; - if(verbose) printf("Started ok\n"); - return 1; + if(my_ioctl(this->fd, VIDIOC_REQBUFS, &reqbuf)) + { + throw std::runtime_error("VIDIOC_REQBUFS failed"); } - void StopDeviceInternal() + if(!reqbuf.count) { - if(verbose) printf("StopDeviceInternal\n"); - if(this->fd==-1) - { - throw std::runtime_error("Device not started"); - } - - //Signal V4l2 api - enum v4l2_buf_type type; - type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + throw std::runtime_error("Not enough buffer memory"); + } - if(my_ioctl(this->fd, VIDIOC_STREAMOFF, &type)) - { - throw std::runtime_error("VIDIOC_STREAMOFF failed"); - } + this->buffers = new struct buffer [reqbuf.count]; - this->deviceStarted = 0; + if(this->buffers == NULL) + { + throw std::runtime_error("Failed to allocate buffer memory"); } - int CloseDeviceInternal() + for(unsigned int i = 0; i < reqbuf.count; i++) { - if(verbose) printf("CloseDeviceInternal\n"); - if(this->fd == -1) + struct v4l2_buffer buffer; + buffer.index = i; + buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + buffer.memory = V4L2_MEMORY_MMAP; + + if(my_ioctl(fd, VIDIOC_QUERYBUF, &buffer)) { - throw std::runtime_error("Device not open"); + throw std::runtime_error("VIDIOC_QUERYBUF failed"); } - if(this->deviceStarted) - StopDeviceInternal(); + this->buffers[i].length = buffer.length; + this->buffers[i].start = v4l2_mmap(NULL, buffer.length, + PROT_READ | PROT_WRITE, MAP_SHARED, fd, buffer.m.offset); - if(this->buffers!= NULL) + if(this->buffers[i].start == MAP_FAILED) { - for(int i = 0; i < this->buffer_counts; i++) - { - v4l2_munmap(this->buffers[i].start, this->buffers[i].length); - } - delete [] this->buffers; + throw std::runtime_error("v4l2_mmap failed"); } - this->buffers = NULL; - - //Release memory - v4l2_close(fd); - fd = -1; - return 1; } -public: - void Run() + this->buffer_counts = reqbuf.count; + + // Send the buffer to the device. Some devices require this to be done + // before calling 'start'. 
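	// Each VIDIOC_QBUF here hands one of the freshly mmapped buffers over to
	// the driver, so the full capture queue already belongs to the device
	// when streaming is switched on below. ReadFrame() later takes filled
	// buffers back with VIDIOC_DQBUF and immediately re-queues them.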
+ + for(int i = 0; i < buffer_count; i++) { - if(verbose) printf("Thread started: %s\n", this->devName.c_str()); - int running = 1; - pthread_mutex_lock(&this->lock); - this->stopped = 0; - pthread_mutex_unlock(&this->lock); + struct v4l2_buffer buffer; + buffer.index = i; + buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + buffer.memory = V4L2_MEMORY_MMAP; - try - { - while(running) - { - //printf("Sleep\n"); - usleep(1000); - - if(deviceStarted) this->ReadFrame(); - - pthread_mutex_lock(&this->lock); - try - { - - if(this->openDeviceFlag.size() > 0) - { - std::string devName = this->openDeviceFlag[this->openDeviceFlag.size()-1]; - this->openDeviceFlag.pop_back(); - this->OpenDeviceInternal(); - } - - if(this->setFormatFlags.size() > 0 - && this->openDeviceFlag.size() == 0) - { - class SetFormatParams params = this->setFormatFlags[this->setFormatFlags.size()-1]; - this->setFormatFlags.pop_back(); - this->SetFormatInternal(params); - } - - if(this->startDeviceFlag.size() > 0 - && this->openDeviceFlag.size() == 0 - && this->setFormatFlags.size() == 0) - { - int buffer_count = this->startDeviceFlag[this->startDeviceFlag.size()-1]; - this->startDeviceFlag.pop_back(); - this->StartDeviceInternal(buffer_count); - } - - if(this->stopDeviceFlag - && this->openDeviceFlag.size() == 0 - && this->setFormatFlags.size() == 0 - && this->startDeviceFlag.size() == 0) - { - this->StopDeviceInternal(); - this->stopDeviceFlag = 0; - } - - if(this->closeDeviceFlag - && this->openDeviceFlag.size() == 0 - && this->setFormatFlags.size() == 0 - && this->startDeviceFlag.size() == 0 - && !this->stopDeviceFlag) - { - this->CloseDeviceInternal(); - this->closeDeviceFlag = 0; - } - - running = !this->stop; - } - catch(std::exception &err) - { - if(verbose) printf("An exception has occured: %s\n", err.what()); - running = 0; - } - pthread_mutex_unlock(&this->lock); - } - } - catch(std::exception &err) + if(my_ioctl(fd, VIDIOC_QBUF, &buffer)) { - if(verbose) printf("An exception has occured: %s\n", err.what()); + //This may fail with some devices but does not seem to be harmful. } - - if(verbose) printf("Thread stopping\n"); - pthread_mutex_lock(&this->lock); - this->stopped = 1; - pthread_mutex_unlock(&this->lock); - }; -}; - -void *Device_manager_Worker_thread(void *arg) -{ - class Device_manager_Worker_thread_args *argobj = (class Device_manager_Worker_thread_args*) arg; - argobj->Run(); - - return NULL; -} - -// ********************************************************************** - -static void Device_manager_dealloc(Device_manager *self) -{ - //Stop high level threads - for(std::map::iterator it = self->threadArgStore->begin(); - it != self->threadArgStore->end(); it++) - { - PyObject *args = PyTuple_New(1); - PyTuple_SetItem(args, 0, PyString_FromString(it->first.c_str())); - Device_manager_stop(self, args); - Py_DECREF(args); } - delete self->threadArgStore; - self->ob_type->tp_free((PyObject *)self); -} + // Start the device. This lights the LED if it's a camera that has one. 
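	// After VIDIOC_STREAMON the driver starts writing frames into the queued
	// buffers on its own; from this point the worker loop in Run() picks up
	// completed frames by calling ReadFrame(), which dequeues them with
	// VIDIOC_DQBUF.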
+ enum v4l2_buf_type type; + type = V4L2_BUF_TYPE_VIDEO_CAPTURE; -static int Device_manager_init(Device_manager *self, PyObject *args, - PyObject *kwargs) -{ - self->threadArgStore = new std::map; - return 0; -} - -static PyObject *Device_manager_open(Device_manager *self, PyObject *args) -{ - //Process arguments - const char *devarg = "/dev/video0"; - if(PyTuple_Size(args) >= 1) + if(my_ioctl(fd, VIDIOC_STREAMON, &type)) { - PyObject *pydevarg = PyTuple_GetItem(args, 0); - devarg = PyString_AsString(pydevarg); + throw std::runtime_error("VIDIOC_STREAMON failed"); } - //Check this device has not already been opened - std::map::iterator it = self->threadArgStore->find(devarg); - if(it!=self->threadArgStore->end()) - { - PyErr_Format(PyExc_RuntimeError, "Device already opened."); - Py_RETURN_NONE; - } - - pthread_t thread; - Device_manager_Worker_thread_args *threadArgs = new Device_manager_Worker_thread_args(devarg); - (*self->threadArgStore)[devarg] = threadArgs; - threadArgs->self = self; - pthread_create(&thread, NULL, Device_manager_Worker_thread, threadArgs); - - threadArgs->OpenDevice(); - - Py_RETURN_NONE; + this->deviceStarted = 1; + if(verbose) printf("Started ok\n"); + return 1; } - -static PyObject *Device_manager_set_format(Device_manager *self, PyObject *args) +void Device_manager_Worker_thread_args::StopDeviceInternal() { - int size_x; - int size_y; - const char *fmt = NULL; - const char *devarg = NULL; - - if(!PyArg_ParseTuple(args, "sii|s", &devarg, &size_x, &size_y, &fmt)) + if(verbose) printf("StopDeviceInternal\n"); + if(this->fd==-1) { - Py_RETURN_NONE; + throw std::runtime_error("Device not started"); } - //Check this device is valid - std::map::iterator it = self->threadArgStore->find(devarg); - if(it==self->threadArgStore->end()) + //Signal V4l2 api + enum v4l2_buf_type type; + type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + + if(my_ioctl(this->fd, VIDIOC_STREAMOFF, &type)) { - PyErr_Format(PyExc_RuntimeError, "Device already not ready."); - Py_RETURN_NONE; + throw std::runtime_error("VIDIOC_STREAMOFF failed"); } - class Device_manager_Worker_thread_args *threadArgs = (*self->threadArgStore)[devarg]; - threadArgs->SetFormat(fmt, size_x, size_y); - - Py_RETURN_NONE; + this->deviceStarted = 0; } -static PyObject *Device_manager_Start(Device_manager *self, PyObject *args) +int Device_manager_Worker_thread_args::CloseDeviceInternal() { - - //Process arguments - const char *devarg = "/dev/video0"; - if(PyTuple_Size(args) >= 1) + if(verbose) printf("CloseDeviceInternal\n"); + if(this->fd == -1) { - PyObject *pydevarg = PyTuple_GetItem(args, 0); - devarg = PyString_AsString(pydevarg); + throw std::runtime_error("Device not open"); } - long buffer_count = 10; - if(PyTuple_Size(args) >= 4) - { - PyObject *pybufferarg = PyTuple_GetItem(args, 4); - buffer_count = PyInt_AsLong(pybufferarg); - } + if(this->deviceStarted) + StopDeviceInternal(); - //Check this device is valid - std::map::iterator it = self->threadArgStore->find(devarg); - if(it==self->threadArgStore->end()) + if(this->buffers!= NULL) { - PyErr_Format(PyExc_RuntimeError, "Device already not ready."); - Py_RETURN_NONE; + for(int i = 0; i < this->buffer_counts; i++) + { + v4l2_munmap(this->buffers[i].start, this->buffers[i].length); + } + delete [] this->buffers; } + this->buffers = NULL; - class Device_manager_Worker_thread_args *threadArgs = (*self->threadArgStore)[devarg]; - threadArgs->StartDevice(buffer_count); - - Py_RETURN_NONE; + //Release memory + v4l2_close(fd); + fd = -1; + return 1; } -static PyObject 
*Device_manager_Get_frame(Device_manager *self, PyObject *args) +void Device_manager_Worker_thread_args::Run() { + if(verbose) printf("Thread started: %s\n", this->devName.c_str()); + int running = 1; + pthread_mutex_lock(&this->lock); + this->stopped = 0; + pthread_mutex_unlock(&this->lock); - //Process arguments - const char *devarg = "/dev/video0"; - if(PyTuple_Size(args) >= 1) + try { - PyObject *pydevarg = PyTuple_GetItem(args, 0); - devarg = PyString_AsString(pydevarg); - } - - //Check this device is valid - std::map::iterator it = self->threadArgStore->find(devarg); - if(it==self->threadArgStore->end()) + while(running) { - PyErr_Format(PyExc_RuntimeError, "Device already not ready."); - Py_RETURN_NONE; - } + //printf("Sleep\n"); + usleep(1000); - class Device_manager_Worker_thread_args *threadArgs = (*self->threadArgStore)[devarg]; - unsigned char *buffOut = NULL; - class FrameMetaData metaOut; + if(deviceStarted) this->ReadFrame(); - int ok = threadArgs->GetFrame(&buffOut, &metaOut); - if(ok && buffOut != NULL) - { - //Format output to python - PyObject *pymeta = PyDict_New(); - PyDict_SetItemString(pymeta, "width", PyInt_FromLong(metaOut.width)); - PyDict_SetItemString(pymeta, "height", PyInt_FromLong(metaOut.height)); - PyDict_SetItemString(pymeta, "format", PyString_FromString(metaOut.fmt.c_str())); - PyDict_SetItemString(pymeta, "sequence", PyInt_FromLong(metaOut.sequence)); - PyDict_SetItemString(pymeta, "tv_sec", PyInt_FromLong(metaOut.tv_sec)); - PyDict_SetItemString(pymeta, "tv_usec", PyInt_FromLong(metaOut.tv_usec)); - - PyObject *out = PyTuple_New(2); - PyTuple_SetItem(out, 0, PyByteArray_FromStringAndSize((char *)buffOut, metaOut.buffLen)); - PyTuple_SetItem(out, 1, pymeta); - - delete [] buffOut; - return out; - } - - Py_RETURN_NONE; -} + pthread_mutex_lock(&this->lock); + try + { -static PyObject *Device_manager_stop(Device_manager *self, PyObject *args) -{ - //Process arguments - const char *devarg = "/dev/video0"; - if(PyTuple_Size(args) >= 1) - { - PyObject *pydevarg = PyTuple_GetItem(args, 0); - devarg = PyString_AsString(pydevarg); - } + if(this->openDeviceFlag.size() > 0) + { + std::string devName = this->openDeviceFlag[this->openDeviceFlag.size()-1]; + this->openDeviceFlag.pop_back(); + this->OpenDeviceInternal(); + } - //Check this device is valid - std::map::iterator it = self->threadArgStore->find(devarg); - if(it==self->threadArgStore->end()) - { - PyErr_Format(PyExc_RuntimeError, "Device already not ready."); - Py_RETURN_NONE; - } + if(this->setFormatFlags.size() > 0 + && this->openDeviceFlag.size() == 0) + { + class SetFormatParams params = this->setFormatFlags[this->setFormatFlags.size()-1]; + this->setFormatFlags.pop_back(); + this->SetFormatInternal(params); + } - class Device_manager_Worker_thread_args *threadArgs = (*self->threadArgStore)[devarg]; - threadArgs->StopDevice(); + if(this->startDeviceFlag.size() > 0 + && this->openDeviceFlag.size() == 0 + && this->setFormatFlags.size() == 0) + { + int buffer_count = this->startDeviceFlag[this->startDeviceFlag.size()-1]; + this->startDeviceFlag.pop_back(); + this->StartDeviceInternal(buffer_count); + } - Py_RETURN_NONE; -} + if(this->stopDeviceFlag + && this->openDeviceFlag.size() == 0 + && this->setFormatFlags.size() == 0 + && this->startDeviceFlag.size() == 0) + { + this->StopDeviceInternal(); + this->stopDeviceFlag = 0; + } -static PyObject *Device_manager_close(Device_manager *self, PyObject *args) -{ - //Process arguments - const char *devarg = "/dev/video0"; - if(PyTuple_Size(args) >= 1) - { - 
PyObject *pydevarg = PyTuple_GetItem(args, 0); - devarg = PyString_AsString(pydevarg); + if(this->closeDeviceFlag + && this->openDeviceFlag.size() == 0 + && this->setFormatFlags.size() == 0 + && this->startDeviceFlag.size() == 0 + && !this->stopDeviceFlag) + { + this->CloseDeviceInternal(); + this->closeDeviceFlag = 0; + } + + running = !this->stop; + } + catch(std::exception &err) + { + if(verbose) printf("An exception has occured: %s\n", err.what()); + running = 0; + } + pthread_mutex_unlock(&this->lock); } - - //Check this device is valid - std::map::iterator it = self->threadArgStore->find(devarg); - if(it==self->threadArgStore->end()) + } + catch(std::exception &err) { - PyErr_Format(PyExc_RuntimeError, "Device already not ready."); - Py_RETURN_NONE; + if(verbose) printf("An exception has occured: %s\n", err.what()); } - class Device_manager_Worker_thread_args *threadArgs = (*self->threadArgStore)[devarg]; - threadArgs->CloseDevice(); - - //Stop worker thread - threadArgs->Stop(); - - //Release memeory - threadArgs->WaitForStop(); - delete threadArgs; - self->threadArgStore->erase(devarg); - - Py_RETURN_NONE; + if(verbose) printf("Thread stopping\n"); + pthread_mutex_lock(&this->lock); + this->stopped = 1; + pthread_mutex_unlock(&this->lock); } -static PyObject *Device_manager_list_devices(Device_manager *self) +void *Device_manager_Worker_thread(void *arg) { - PyObject *out = PyList_New(0); - const char dir[] = "/dev"; - DIR *dp; - struct dirent *dirp; - if((dp = opendir(dir)) == NULL) { - printf("Error(%d) opening %s\n", errno, dir); - Py_RETURN_NONE; - } - - while ((dirp = readdir(dp)) != NULL) { - if (strncmp(dirp->d_name, "video", 5) != 0) continue; - std::string tmp = "/dev/"; - tmp.append(dirp->d_name); - PyList_Append(out, PyString_FromString(tmp.c_str())); - } - closedir(dp); + class Device_manager_Worker_thread_args *argobj = (class Device_manager_Worker_thread_args*) arg; + argobj->Run(); - PyList_Sort(out); - return out; + return NULL; } -// ********************************************************************* - -static PyMethodDef Device_manager_methods[] = { - {"open", (PyCFunction)Device_manager_open, METH_VARARGS, - "open(dev = '\\dev\\video0')\n\n" - "Open video capture."}, - {"set_format", (PyCFunction)Device_manager_set_format, METH_VARARGS, - "set_format(dev, size_x, size_y, pixel_format='RGB24') -> size_x, size_y\n\n" - "Request the video device to set image size and format. The device may " - "choose another size than requested and will return its choice. The " - "pixel format may be either RGB24, YUV420 or MJPEG."}, - {"start", (PyCFunction)Device_manager_Start, METH_VARARGS, - "start(dev = '\\dev\\video0', reqSize=(640, 480), reqFps = 30, fmt = 'MJPEG\', buffer_count = 10)\n\n" - "Start video capture."}, - {"get_frame", (PyCFunction)Device_manager_Get_frame, METH_VARARGS, - "start(dev = '\\dev\\video0'\n\n" - "Get video frame."}, - {"stop", (PyCFunction)Device_manager_stop, METH_VARARGS, - "stop(dev = '\\dev\\video0')\n\n" - "Stop video capture."}, - {"close", (PyCFunction)Device_manager_close, METH_VARARGS, - "close(dev = '\\dev\\video0')\n\n" - "Close video device. 
Subsequent calls to other methods will fail."}, - {"list_devices", (PyCFunction)Device_manager_list_devices, METH_NOARGS, - "list_devices()\n\n" - "List available capture devices."}, - {NULL} -}; - -static PyTypeObject Device_manager_type = { - PyObject_HEAD_INIT(NULL) - 0, "v4l2capture.Device_manager", sizeof(Device_manager), 0, - (destructor)Device_manager_dealloc, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, Py_TPFLAGS_DEFAULT, "Device_manager(path)\n\nOpens the video device at " - "the given path and returns an object that can capture images. The " - "constructor and all methods except close may raise IOError.", 0, 0, 0, - 0, 0, 0, Device_manager_methods, 0, 0, 0, 0, 0, 0, 0, - (initproc)Device_manager_init -}; - -static PyMethodDef Video_out_manager_methods[] = { - {"open", (PyCFunction)Video_out_manager_open, METH_VARARGS, - "open(dev = '\\dev\\video0', pixel_format, width, height)\n\n" - "Open video output."}, - {"send_frame", (PyCFunction)Video_out_manager_Send_frame, METH_VARARGS, - "send_frame(dev = '\\dev\\video0', img, pixel_format, width, height)\n\n" - "Send frame to video stream output."}, - {"close", (PyCFunction)Video_out_manager_close, METH_VARARGS, - "close(dev = '\\dev\\video0')\n\n" - "Close video device. Subsequent calls to other methods will fail."}, - {"list_devices", (PyCFunction)Video_out_manager_list_devices, METH_NOARGS, - "list_devices()\n\n" - "List available capture devices."}, - {NULL} -}; - -static PyTypeObject Video_out_manager_type = { - PyObject_HEAD_INIT(NULL) - 0, "v4l2capture.Video_out_manager", sizeof(Video_out_manager), 0, - (destructor)Video_out_manager_dealloc, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, Py_TPFLAGS_DEFAULT, "Video_out_manager(path)\n\nOpens the video device at " - "the given path and returns an object that can capture images. 
The " - "constructor and all methods except close may raise IOError.", 0, 0, 0, - 0, 0, 0, Video_out_manager_methods, 0, 0, 0, 0, 0, 0, 0, - (initproc)Video_out_manager_init -}; - -// ********************************************************************* - -static PyMethodDef module_methods[] = { - { "InsertHuffmanTable", (PyCFunction)InsertHuffmanTable, METH_VARARGS, NULL }, - { NULL, NULL, 0, NULL } -}; - -PyMODINIT_FUNC initv4l2capture(void) -{ - Device_manager_type.tp_new = PyType_GenericNew; - Video_out_manager_type.tp_new = PyType_GenericNew; - - if(PyType_Ready(&Device_manager_type) < 0) - { - return; - } - if(PyType_Ready(&Video_out_manager_type) < 0) - { - return; - } - - PyObject *module = Py_InitModule3("v4l2capture", module_methods, - "Capture video with video4linux2."); - - if(!module) - { - return; - } - - Py_INCREF(&Device_manager_type); - PyModule_AddObject(module, "Device_manager", (PyObject *)&Device_manager_type); - PyModule_AddObject(module, "Video_out_manager", (PyObject *)&Video_out_manager_type); - -} diff --git a/v4l2capture.h b/v4l2capture.h new file mode 100644 index 0000000..6de27e5 --- /dev/null +++ b/v4l2capture.h @@ -0,0 +1,171 @@ +// python-v4l2capture +// Python extension to capture video with video4linux2 +// +// 2009, 2010, 2011 Fredrik Portstrom, released into the public domain +// 2011, Joakim Gebart +// 2013, Tim Sheerman-Chase +// See README for license + +#ifndef V4L2CAPTURE_H +#define V4L2CAPTURE_H + +#include +#include +#include + +struct buffer { + void *start; + size_t length; +}; + +struct capability { + int id; + const char *name; +}; + + +class FrameMetaData +{ +public: + std::string fmt; + int width; + int height; + unsigned buffLen; + unsigned long sequence; + unsigned long tv_sec; + unsigned long tv_usec; + + FrameMetaData() + { + width = 0; + height = 0; + buffLen = 0; + sequence = 0; + tv_sec = 0; + tv_usec = 0; + } + + FrameMetaData(const FrameMetaData &in) + { + FrameMetaData::operator=(in); + } + + const FrameMetaData &operator=(const FrameMetaData &in) + { + width = in.width; + height = in.height; + fmt = in.fmt; + buffLen = in.buffLen; + sequence = in.sequence; + tv_sec = in.tv_sec; + tv_usec = in.tv_usec; + return *this; + } + +}; + +// ********************************************************************** + +class SetFormatParams +{ +public: + std::string fmt; + int width, height; + + SetFormatParams() + { + width = 0; + height = 0; + } + + SetFormatParams(const SetFormatParams &in) + { + SetFormatParams::operator=(in); + } + + const SetFormatParams &operator=(const SetFormatParams &in) + { + width = in.width; + height = in.height; + fmt = in.fmt; + return *this; + } +}; + +/*static struct capability capabilities[] = { + { V4L2_CAP_ASYNCIO, "asyncio" }, + { V4L2_CAP_AUDIO, "audio" }, + { V4L2_CAP_HW_FREQ_SEEK, "hw_freq_seek" }, + { V4L2_CAP_RADIO, "radio" }, + { V4L2_CAP_RDS_CAPTURE, "rds_capture" }, + { V4L2_CAP_READWRITE, "readwrite" }, + { V4L2_CAP_SLICED_VBI_CAPTURE, "sliced_vbi_capture" }, + { V4L2_CAP_SLICED_VBI_OUTPUT, "sliced_vbi_output" }, + { V4L2_CAP_STREAMING, "streaming" }, + { V4L2_CAP_TUNER, "tuner" }, + { V4L2_CAP_VBI_CAPTURE, "vbi_capture" }, + { V4L2_CAP_VBI_OUTPUT, "vbi_output" }, + { V4L2_CAP_VIDEO_CAPTURE, "video_capture" }, + { V4L2_CAP_VIDEO_OUTPUT, "video_output" }, + { V4L2_CAP_VIDEO_OUTPUT_OVERLAY, "video_output_overlay" }, + { V4L2_CAP_VIDEO_OVERLAY, "video_overlay" } +};*/ + +int my_ioctl(int fd, int request, void *arg, int utimeout); + +class Device_manager_Worker_thread_args +{ +public: + 
//Device_manager *self; + std::string devName; + int stop; + int stopped; + pthread_mutex_t lock; + std::vector openDeviceFlag; + std::vector startDeviceFlag; + std::vector setFormatFlags; + int stopDeviceFlag; + int closeDeviceFlag; + int deviceStarted; + int fd; + struct buffer *buffers; + int frameWidth, frameHeight; + int buffer_counts; + std::string pxFmt; + int verbose; + std::string targetFmt; + + std::vector decodedFrameBuff; + std::vector decodedFrameMetaBuff; + unsigned decodedFrameBuffMaxSize; + + Device_manager_Worker_thread_args(const char *devNameIn); + virtual ~Device_manager_Worker_thread_args(); + void Stop(); + void WaitForStop(); + void OpenDevice(); + void SetFormat(const char *fmt, int width, int height); + void StartDevice(int buffer_count); + void StopDevice(); + void CloseDevice(); + int GetFrame(unsigned char **buffOut, class FrameMetaData *metaOut); + +protected: + int ReadFrame(); + int OpenDeviceInternal(); + int SetFormatInternal(class SetFormatParams &args); + int GetFormatInternal(); + int StartDeviceInternal(int buffer_count); + void StopDeviceInternal(); + int CloseDeviceInternal(); + +public: + void Run(); +}; + +void *Device_manager_Worker_thread(void *arg); + +// ********************************************************************** + +#endif //V4L2CAPTURE_H + From 3ef4b8cf924e2d7a6c4f4a60fcc6d5885a878ee2 Mon Sep 17 00:00:00 2001 From: Tim Sheerman-Chase Date: Fri, 15 Nov 2013 10:12:06 +0000 Subject: [PATCH 076/256] Use correct init function name --- libvideolive.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/libvideolive.cpp b/libvideolive.cpp index 929e771..95b4abb 100644 --- a/libvideolive.cpp +++ b/libvideolive.cpp @@ -388,7 +388,7 @@ static PyMethodDef module_methods[] = { { NULL, NULL, 0, NULL } }; -PyMODINIT_FUNC initv4l2capture(void) +PyMODINIT_FUNC initvideolive(void) { Device_manager_type.tp_new = PyType_GenericNew; Video_out_manager_type.tp_new = PyType_GenericNew; From 68482c9ce565169bfb8d90099d0ecf8fc42c14bd Mon Sep 17 00:00:00 2001 From: Tim Sheerman-Chase Date: Fri, 15 Nov 2013 10:16:41 +0000 Subject: [PATCH 077/256] Rename video input class --- libvideolive.cpp | 32 ++++++++++++++++---------------- v4l2capture.cpp | 40 ++++++++++++++++++++-------------------- v4l2capture.h | 8 ++++---- 3 files changed, 40 insertions(+), 40 deletions(-) diff --git a/libvideolive.cpp b/libvideolive.cpp index 95b4abb..8f05bda 100644 --- a/libvideolive.cpp +++ b/libvideolive.cpp @@ -30,7 +30,7 @@ typedef struct { class Device_manager_cl{ public: PyObject_HEAD - std::map *threadArgStore; + std::map *threadArgStore; }; typedef Device_manager_cl Device_manager; @@ -82,7 +82,7 @@ PyObject *InsertHuffmanTable(PyObject *self, PyObject *args) static void Device_manager_dealloc(Device_manager *self) { //Stop high level threads - for(std::map::iterator it = self->threadArgStore->begin(); + for(std::map::iterator it = self->threadArgStore->begin(); it != self->threadArgStore->end(); it++) { PyObject *args = PyTuple_New(1); @@ -98,7 +98,7 @@ static void Device_manager_dealloc(Device_manager *self) static int Device_manager_init(Device_manager *self, PyObject *args, PyObject *kwargs) { - self->threadArgStore = new std::map; + self->threadArgStore = new std::map; return 0; } @@ -113,7 +113,7 @@ static PyObject *Device_manager_open(Device_manager *self, PyObject *args) } //Check this device has not already been opened - std::map::iterator it = self->threadArgStore->find(devarg); + std::map::iterator it = self->threadArgStore->find(devarg); 
if(it!=self->threadArgStore->end()) { PyErr_Format(PyExc_RuntimeError, "Device already opened."); @@ -121,9 +121,9 @@ static PyObject *Device_manager_open(Device_manager *self, PyObject *args) } pthread_t thread; - Device_manager_Worker_thread_args *threadArgs = new Device_manager_Worker_thread_args(devarg); + Video_in_Manager *threadArgs = new Video_in_Manager(devarg); (*self->threadArgStore)[devarg] = threadArgs; - pthread_create(&thread, NULL, Device_manager_Worker_thread, threadArgs); + pthread_create(&thread, NULL, Video_in_Worker_thread, threadArgs); threadArgs->OpenDevice(); @@ -144,14 +144,14 @@ static PyObject *Device_manager_set_format(Device_manager *self, PyObject *args) } //Check this device is valid - std::map::iterator it = self->threadArgStore->find(devarg); + std::map::iterator it = self->threadArgStore->find(devarg); if(it==self->threadArgStore->end()) { PyErr_Format(PyExc_RuntimeError, "Device already not ready."); Py_RETURN_NONE; } - class Device_manager_Worker_thread_args *threadArgs = (*self->threadArgStore)[devarg]; + class Video_in_Manager *threadArgs = (*self->threadArgStore)[devarg]; threadArgs->SetFormat(fmt, size_x, size_y); Py_RETURN_NONE; @@ -176,14 +176,14 @@ static PyObject *Device_manager_Start(Device_manager *self, PyObject *args) } //Check this device is valid - std::map::iterator it = self->threadArgStore->find(devarg); + std::map::iterator it = self->threadArgStore->find(devarg); if(it==self->threadArgStore->end()) { PyErr_Format(PyExc_RuntimeError, "Device already not ready."); Py_RETURN_NONE; } - class Device_manager_Worker_thread_args *threadArgs = (*self->threadArgStore)[devarg]; + class Video_in_Manager *threadArgs = (*self->threadArgStore)[devarg]; threadArgs->StartDevice(buffer_count); Py_RETURN_NONE; @@ -201,14 +201,14 @@ static PyObject *Device_manager_Get_frame(Device_manager *self, PyObject *args) } //Check this device is valid - std::map::iterator it = self->threadArgStore->find(devarg); + std::map::iterator it = self->threadArgStore->find(devarg); if(it==self->threadArgStore->end()) { PyErr_Format(PyExc_RuntimeError, "Device already not ready."); Py_RETURN_NONE; } - class Device_manager_Worker_thread_args *threadArgs = (*self->threadArgStore)[devarg]; + class Video_in_Manager *threadArgs = (*self->threadArgStore)[devarg]; unsigned char *buffOut = NULL; class FrameMetaData metaOut; @@ -246,14 +246,14 @@ static PyObject *Device_manager_stop(Device_manager *self, PyObject *args) } //Check this device is valid - std::map::iterator it = self->threadArgStore->find(devarg); + std::map::iterator it = self->threadArgStore->find(devarg); if(it==self->threadArgStore->end()) { PyErr_Format(PyExc_RuntimeError, "Device already not ready."); Py_RETURN_NONE; } - class Device_manager_Worker_thread_args *threadArgs = (*self->threadArgStore)[devarg]; + class Video_in_Manager *threadArgs = (*self->threadArgStore)[devarg]; threadArgs->StopDevice(); Py_RETURN_NONE; @@ -270,14 +270,14 @@ static PyObject *Device_manager_close(Device_manager *self, PyObject *args) } //Check this device is valid - std::map::iterator it = self->threadArgStore->find(devarg); + std::map::iterator it = self->threadArgStore->find(devarg); if(it==self->threadArgStore->end()) { PyErr_Format(PyExc_RuntimeError, "Device already not ready."); Py_RETURN_NONE; } - class Device_manager_Worker_thread_args *threadArgs = (*self->threadArgStore)[devarg]; + class Video_in_Manager *threadArgs = (*self->threadArgStore)[devarg]; threadArgs->CloseDevice(); //Stop worker thread diff --git 
a/v4l2capture.cpp b/v4l2capture.cpp index 8c4fbb0..67c1f61 100644 --- a/v4l2capture.cpp +++ b/v4l2capture.cpp @@ -84,7 +84,7 @@ int my_ioctl(int fd, int request, void *arg, int utimeout = -1) // *************************************************************************** -Device_manager_Worker_thread_args::Device_manager_Worker_thread_args(const char *devNameIn) +Video_in_Manager::Video_in_Manager(const char *devNameIn) { stop = 0; stopped = 1; @@ -102,7 +102,7 @@ Device_manager_Worker_thread_args::Device_manager_Worker_thread_args(const char targetFmt = "RGB24"; } -Device_manager_Worker_thread_args::~Device_manager_Worker_thread_args() +Video_in_Manager::~Video_in_Manager() { if(deviceStarted) { @@ -126,14 +126,14 @@ Device_manager_Worker_thread_args::~Device_manager_Worker_thread_args() pthread_mutex_destroy(&lock); } -void Device_manager_Worker_thread_args::Stop() +void Video_in_Manager::Stop() { pthread_mutex_lock(&this->lock); this->stop = 1; pthread_mutex_unlock(&this->lock); } -void Device_manager_Worker_thread_args::WaitForStop() +void Video_in_Manager::WaitForStop() { while(1) { @@ -146,14 +146,14 @@ void Device_manager_Worker_thread_args::WaitForStop() } } -void Device_manager_Worker_thread_args::OpenDevice() +void Video_in_Manager::OpenDevice() { pthread_mutex_lock(&this->lock); this->openDeviceFlag.push_back(this->devName.c_str()); pthread_mutex_unlock(&this->lock); } -void Device_manager_Worker_thread_args::SetFormat(const char *fmt, int width, int height) +void Video_in_Manager::SetFormat(const char *fmt, int width, int height) { class SetFormatParams params; params.fmt = fmt; @@ -165,28 +165,28 @@ void Device_manager_Worker_thread_args::SetFormat(const char *fmt, int width, in pthread_mutex_unlock(&this->lock); } -void Device_manager_Worker_thread_args::StartDevice(int buffer_count) +void Video_in_Manager::StartDevice(int buffer_count) { pthread_mutex_lock(&this->lock); this->startDeviceFlag.push_back(buffer_count); pthread_mutex_unlock(&this->lock); } -void Device_manager_Worker_thread_args::StopDevice() +void Video_in_Manager::StopDevice() { pthread_mutex_lock(&this->lock); this->stopDeviceFlag = 1; pthread_mutex_unlock(&this->lock); } -void Device_manager_Worker_thread_args::CloseDevice() +void Video_in_Manager::CloseDevice() { pthread_mutex_lock(&this->lock); this->closeDeviceFlag = 1; pthread_mutex_unlock(&this->lock); } -int Device_manager_Worker_thread_args::GetFrame(unsigned char **buffOut, class FrameMetaData *metaOut) +int Video_in_Manager::GetFrame(unsigned char **buffOut, class FrameMetaData *metaOut) { pthread_mutex_lock(&this->lock); if(this->decodedFrameBuff.size()==0) @@ -207,7 +207,7 @@ int Device_manager_Worker_thread_args::GetFrame(unsigned char **buffOut, class F return 1; } -int Device_manager_Worker_thread_args::ReadFrame() +int Video_in_Manager::ReadFrame() { if(this->fd<0) throw std::runtime_error("File not open"); @@ -274,7 +274,7 @@ int Device_manager_Worker_thread_args::ReadFrame() return 1; } -int Device_manager_Worker_thread_args::OpenDeviceInternal() +int Video_in_Manager::OpenDeviceInternal() { if(verbose) printf("OpenDeviceInternal\n"); //Open the video device. 
@@ -290,7 +290,7 @@ int Device_manager_Worker_thread_args::OpenDeviceInternal() return 1; } -int Device_manager_Worker_thread_args::SetFormatInternal(class SetFormatParams &args) +int Video_in_Manager::SetFormatInternal(class SetFormatParams &args) { if(verbose) printf("SetFormatInternal\n"); //int size_x, int size_y, const char *fmt; @@ -329,7 +329,7 @@ int Device_manager_Worker_thread_args::SetFormatInternal(class SetFormatParams & return 1; } -int Device_manager_Worker_thread_args::GetFormatInternal() +int Video_in_Manager::GetFormatInternal() { struct v4l2_format format; format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; @@ -371,7 +371,7 @@ int Device_manager_Worker_thread_args::GetFormatInternal() return 1; } -int Device_manager_Worker_thread_args::StartDeviceInternal(int buffer_count = 10) +int Video_in_Manager::StartDeviceInternal(int buffer_count = 10) { if(verbose) printf("StartDeviceInternal\n"); //Check this device has not already been start @@ -479,7 +479,7 @@ int Device_manager_Worker_thread_args::StartDeviceInternal(int buffer_count = 10 return 1; } -void Device_manager_Worker_thread_args::StopDeviceInternal() +void Video_in_Manager::StopDeviceInternal() { if(verbose) printf("StopDeviceInternal\n"); if(this->fd==-1) @@ -499,7 +499,7 @@ void Device_manager_Worker_thread_args::StopDeviceInternal() this->deviceStarted = 0; } -int Device_manager_Worker_thread_args::CloseDeviceInternal() +int Video_in_Manager::CloseDeviceInternal() { if(verbose) printf("CloseDeviceInternal\n"); if(this->fd == -1) @@ -526,7 +526,7 @@ int Device_manager_Worker_thread_args::CloseDeviceInternal() return 1; } -void Device_manager_Worker_thread_args::Run() +void Video_in_Manager::Run() { if(verbose) printf("Thread started: %s\n", this->devName.c_str()); int running = 1; @@ -611,9 +611,9 @@ void Device_manager_Worker_thread_args::Run() pthread_mutex_unlock(&this->lock); } -void *Device_manager_Worker_thread(void *arg) +void *Video_in_Worker_thread(void *arg) { - class Device_manager_Worker_thread_args *argobj = (class Device_manager_Worker_thread_args*) arg; + class Video_in_Manager *argobj = (class Video_in_Manager*) arg; argobj->Run(); return NULL; diff --git a/v4l2capture.h b/v4l2capture.h index 6de27e5..f7f32d1 100644 --- a/v4l2capture.h +++ b/v4l2capture.h @@ -113,7 +113,7 @@ class SetFormatParams int my_ioctl(int fd, int request, void *arg, int utimeout); -class Device_manager_Worker_thread_args +class Video_in_Manager { public: //Device_manager *self; @@ -139,8 +139,8 @@ class Device_manager_Worker_thread_args std::vector decodedFrameMetaBuff; unsigned decodedFrameBuffMaxSize; - Device_manager_Worker_thread_args(const char *devNameIn); - virtual ~Device_manager_Worker_thread_args(); + Video_in_Manager(const char *devNameIn); + virtual ~Video_in_Manager(); void Stop(); void WaitForStop(); void OpenDevice(); @@ -163,7 +163,7 @@ class Device_manager_Worker_thread_args void Run(); }; -void *Device_manager_Worker_thread(void *arg); +void *Video_in_Worker_thread(void *arg); // ********************************************************************** From ddd20aec81052e280b6b85928a689d4d38d6afdf Mon Sep 17 00:00:00 2001 From: Tim Sheerman-Chase Date: Fri, 15 Nov 2013 10:47:31 +0000 Subject: [PATCH 078/256] Use a common video input base class --- libvideolive.cpp | 28 ++++++++++++++-------------- v4l2capture.cpp | 2 +- v4l2capture.h | 45 ++------------------------------------------- 3 files changed, 17 insertions(+), 58 deletions(-) diff --git a/libvideolive.cpp b/libvideolive.cpp index 8f05bda..6e69d4c 100644 --- 
a/libvideolive.cpp +++ b/libvideolive.cpp @@ -30,7 +30,7 @@ typedef struct { class Device_manager_cl{ public: PyObject_HEAD - std::map *threadArgStore; + std::map *threadArgStore; }; typedef Device_manager_cl Device_manager; @@ -82,7 +82,7 @@ PyObject *InsertHuffmanTable(PyObject *self, PyObject *args) static void Device_manager_dealloc(Device_manager *self) { //Stop high level threads - for(std::map::iterator it = self->threadArgStore->begin(); + for(std::map::iterator it = self->threadArgStore->begin(); it != self->threadArgStore->end(); it++) { PyObject *args = PyTuple_New(1); @@ -98,7 +98,7 @@ static void Device_manager_dealloc(Device_manager *self) static int Device_manager_init(Device_manager *self, PyObject *args, PyObject *kwargs) { - self->threadArgStore = new std::map; + self->threadArgStore = new std::map; return 0; } @@ -113,7 +113,7 @@ static PyObject *Device_manager_open(Device_manager *self, PyObject *args) } //Check this device has not already been opened - std::map::iterator it = self->threadArgStore->find(devarg); + std::map::iterator it = self->threadArgStore->find(devarg); if(it!=self->threadArgStore->end()) { PyErr_Format(PyExc_RuntimeError, "Device already opened."); @@ -144,14 +144,14 @@ static PyObject *Device_manager_set_format(Device_manager *self, PyObject *args) } //Check this device is valid - std::map::iterator it = self->threadArgStore->find(devarg); + std::map::iterator it = self->threadArgStore->find(devarg); if(it==self->threadArgStore->end()) { PyErr_Format(PyExc_RuntimeError, "Device already not ready."); Py_RETURN_NONE; } - class Video_in_Manager *threadArgs = (*self->threadArgStore)[devarg]; + class Base_Video_In *threadArgs = (*self->threadArgStore)[devarg]; threadArgs->SetFormat(fmt, size_x, size_y); Py_RETURN_NONE; @@ -176,14 +176,14 @@ static PyObject *Device_manager_Start(Device_manager *self, PyObject *args) } //Check this device is valid - std::map::iterator it = self->threadArgStore->find(devarg); + std::map::iterator it = self->threadArgStore->find(devarg); if(it==self->threadArgStore->end()) { PyErr_Format(PyExc_RuntimeError, "Device already not ready."); Py_RETURN_NONE; } - class Video_in_Manager *threadArgs = (*self->threadArgStore)[devarg]; + class Base_Video_In *threadArgs = (*self->threadArgStore)[devarg]; threadArgs->StartDevice(buffer_count); Py_RETURN_NONE; @@ -201,14 +201,14 @@ static PyObject *Device_manager_Get_frame(Device_manager *self, PyObject *args) } //Check this device is valid - std::map::iterator it = self->threadArgStore->find(devarg); + std::map::iterator it = self->threadArgStore->find(devarg); if(it==self->threadArgStore->end()) { PyErr_Format(PyExc_RuntimeError, "Device already not ready."); Py_RETURN_NONE; } - class Video_in_Manager *threadArgs = (*self->threadArgStore)[devarg]; + class Base_Video_In *threadArgs = (*self->threadArgStore)[devarg]; unsigned char *buffOut = NULL; class FrameMetaData metaOut; @@ -246,14 +246,14 @@ static PyObject *Device_manager_stop(Device_manager *self, PyObject *args) } //Check this device is valid - std::map::iterator it = self->threadArgStore->find(devarg); + std::map::iterator it = self->threadArgStore->find(devarg); if(it==self->threadArgStore->end()) { PyErr_Format(PyExc_RuntimeError, "Device already not ready."); Py_RETURN_NONE; } - class Video_in_Manager *threadArgs = (*self->threadArgStore)[devarg]; + class Base_Video_In *threadArgs = (*self->threadArgStore)[devarg]; threadArgs->StopDevice(); Py_RETURN_NONE; @@ -270,14 +270,14 @@ static PyObject 
*Device_manager_close(Device_manager *self, PyObject *args) } //Check this device is valid - std::map::iterator it = self->threadArgStore->find(devarg); + std::map::iterator it = self->threadArgStore->find(devarg); if(it==self->threadArgStore->end()) { PyErr_Format(PyExc_RuntimeError, "Device already not ready."); Py_RETURN_NONE; } - class Video_in_Manager *threadArgs = (*self->threadArgStore)[devarg]; + class Base_Video_In *threadArgs = (*self->threadArgStore)[devarg]; threadArgs->CloseDevice(); //Stop worker thread diff --git a/v4l2capture.cpp b/v4l2capture.cpp index 67c1f61..cd8ae0d 100644 --- a/v4l2capture.cpp +++ b/v4l2capture.cpp @@ -82,7 +82,7 @@ int my_ioctl(int fd, int request, void *arg, int utimeout = -1) } } -// *************************************************************************** +// ************************************************************************** Video_in_Manager::Video_in_Manager(const char *devNameIn) { diff --git a/v4l2capture.h b/v4l2capture.h index f7f32d1..a1be72c 100644 --- a/v4l2capture.h +++ b/v4l2capture.h @@ -12,6 +12,7 @@ #include #include #include +#include "base.h" struct buffer { void *start; @@ -24,48 +25,6 @@ struct capability { }; -class FrameMetaData -{ -public: - std::string fmt; - int width; - int height; - unsigned buffLen; - unsigned long sequence; - unsigned long tv_sec; - unsigned long tv_usec; - - FrameMetaData() - { - width = 0; - height = 0; - buffLen = 0; - sequence = 0; - tv_sec = 0; - tv_usec = 0; - } - - FrameMetaData(const FrameMetaData &in) - { - FrameMetaData::operator=(in); - } - - const FrameMetaData &operator=(const FrameMetaData &in) - { - width = in.width; - height = in.height; - fmt = in.fmt; - buffLen = in.buffLen; - sequence = in.sequence; - tv_sec = in.tv_sec; - tv_usec = in.tv_usec; - return *this; - } - -}; - -// ********************************************************************** - class SetFormatParams { public: @@ -113,7 +72,7 @@ class SetFormatParams int my_ioctl(int fd, int request, void *arg, int utimeout); -class Video_in_Manager +class Video_in_Manager : public Base_Video_In { public: //Device_manager *self; From 181a5396577a75d133969b13dba507c3b740c6a2 Mon Sep 17 00:00:00 2001 From: Tim Sheerman-Chase Date: Fri, 15 Nov 2013 10:47:41 +0000 Subject: [PATCH 079/256] Use a common video input base class --- base.h | 63 ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 63 insertions(+) create mode 100644 base.h diff --git a/base.h b/base.h new file mode 100644 index 0000000..9948f1e --- /dev/null +++ b/base.h @@ -0,0 +1,63 @@ + +#ifndef BASE_H +#define BASE_H + +#include + +class FrameMetaData +{ +public: + std::string fmt; + int width; + int height; + unsigned buffLen; + unsigned long sequence; + unsigned long tv_sec; + unsigned long tv_usec; + + FrameMetaData() + { + width = 0; + height = 0; + buffLen = 0; + sequence = 0; + tv_sec = 0; + tv_usec = 0; + } + + FrameMetaData(const FrameMetaData &in) + { + FrameMetaData::operator=(in); + } + + const FrameMetaData &operator=(const FrameMetaData &in) + { + width = in.width; + height = in.height; + fmt = in.fmt; + buffLen = in.buffLen; + sequence = in.sequence; + tv_sec = in.tv_sec; + tv_usec = in.tv_usec; + return *this; + } + +}; + +// ********************************************************************** + +class Base_Video_In +{ +public: + virtual void Stop() {}; + virtual void WaitForStop() {}; + virtual void OpenDevice() {}; + virtual void SetFormat(const char *fmt, int width, int height) {}; + virtual void StartDevice(int 
buffer_count) {}; + virtual void StopDevice() {}; + virtual void CloseDevice() {}; + virtual int GetFrame(unsigned char **buffOut, class FrameMetaData *metaOut) {return 0;}; +}; + +#endif //BASE_H + From be423578480641de5724232a94df79f31aa58485 Mon Sep 17 00:00:00 2001 From: Tim Sheerman-Chase Date: Fri, 15 Nov 2013 11:02:10 +0000 Subject: [PATCH 080/256] Refactor video output --- libvideolive.cpp | 2 +- setup.py | 2 +- v4l2out.cpp | 584 +++++++++++++++++------------------------------ v4l2out.h | 46 ++-- videoout.cpp | 146 ++++++++++++ videoout.h | 27 +++ 6 files changed, 413 insertions(+), 394 deletions(-) create mode 100644 videoout.cpp create mode 100644 videoout.h diff --git a/libvideolive.cpp b/libvideolive.cpp index 6e69d4c..7b1356f 100644 --- a/libvideolive.cpp +++ b/libvideolive.cpp @@ -18,7 +18,7 @@ #include #include "pixfmt.h" #include "v4l2capture.h" -#include "v4l2out.h" +#include "videoout.h" typedef struct { PyObject_HEAD diff --git a/setup.py b/setup.py index b286d14..f351ea8 100755 --- a/setup.py +++ b/setup.py @@ -24,6 +24,6 @@ "License :: GPL", "Programming Language :: C++"], ext_modules = [ - Extension("videolive", ["v4l2capture.cpp", "v4l2out.cpp", "pixfmt.cpp", "libvideolive.cpp"], + Extension("videolive", ["v4l2capture.cpp", "v4l2out.cpp", "pixfmt.cpp", "libvideolive.cpp", "videoout.cpp"], libraries = ["v4l2", "pthread", "jpeg"])]) diff --git a/v4l2out.cpp b/v4l2out.cpp index 4ea18cf..11a1bc8 100644 --- a/v4l2out.cpp +++ b/v4l2out.cpp @@ -64,446 +64,280 @@ class SendFrameArgs } }; -class Video_out +Video_out::Video_out(const char *devNameIn) { -public: - Video_out_manager *self; - std::string devName; - int stop; - int stopped; - pthread_mutex_t lock; - int verbose; - std::vector sendFrameArgs; - std::vector sendFrameBuffer; - struct timespec lastFrameTime; - int fdwr; - int framesize; - unsigned char *currentFrame; - int outputWidth; - int outputHeight; - std::string outputPxFmt; - - Video_out(const char *devNameIn) - { - this->fdwr = 0; - framesize = 0; - stop = 0; - stopped = 1; - verbose = 1; - this->devName = devNameIn; - pthread_mutex_init(&lock, NULL); - currentFrame = NULL; - outputWidth = 640; - outputHeight = 480; - outputPxFmt = "YUYV"; - - clock_gettime(CLOCK_MONOTONIC, &lastFrameTime); - - struct sigevent sevp; - memset(&sevp, 0, sizeof(struct sigevent)); - sevp.sigev_notify = SIGEV_NONE; + this->fdwr = 0; + framesize = 0; + stop = 0; + stopped = 1; + verbose = 1; + this->devName = devNameIn; + pthread_mutex_init(&lock, NULL); + currentFrame = NULL; + outputWidth = 640; + outputHeight = 480; + outputPxFmt = "YUYV"; + + clock_gettime(CLOCK_MONOTONIC, &lastFrameTime); + + struct sigevent sevp; + memset(&sevp, 0, sizeof(struct sigevent)); + sevp.sigev_notify = SIGEV_NONE; - } +} - virtual ~Video_out() +Video_out::~Video_out() +{ + for(unsigned i=0; isendFrameBuffer.size(); i++) { - for(unsigned i=0; isendFrameBuffer.size(); i++) - { - delete [] this->sendFrameBuffer[i]; - } - this->sendFrameBuffer.clear(); - - if(this->currentFrame!=NULL) - delete [] this->currentFrame; - this->currentFrame = NULL; - - pthread_mutex_destroy(&lock); + delete [] this->sendFrameBuffer[i]; } + this->sendFrameBuffer.clear(); -protected: + if(this->currentFrame!=NULL) + delete [] this->currentFrame; + this->currentFrame = NULL; - void SendFrameInternal() - { - const char* buff = NULL; - class SendFrameArgs args; - - pthread_mutex_lock(&this->lock); - if(this->sendFrameBuffer.size()>=1) - { - //Get oldest frame - buff = this->sendFrameBuffer[0]; - args = this->sendFrameArgs[0]; + 
pthread_mutex_destroy(&lock); +} - //Remove frame from buffer - this->sendFrameBuffer.erase(this->sendFrameBuffer.begin()); - this->sendFrameArgs.erase(this->sendFrameArgs.begin()); - } - pthread_mutex_unlock(&this->lock); +void Video_out::SendFrameInternal() +{ + const char* buff = NULL; + class SendFrameArgs args; + + pthread_mutex_lock(&this->lock); + if(this->sendFrameBuffer.size()>=1) + { + //Get oldest frame + buff = this->sendFrameBuffer[0]; + args = this->sendFrameArgs[0]; - //Check time since previous frame send - struct timespec tp; - clock_gettime(CLOCK_MONOTONIC, &tp); - long int secSinceLastFrame = tp.tv_sec - this->lastFrameTime.tv_sec; - long int nsecSinceLastFrame = tp.tv_nsec - this->lastFrameTime.tv_nsec; - if(nsecSinceLastFrame < 0) - { - secSinceLastFrame -= 1; - nsecSinceLastFrame *= -1; - } + //Remove frame from buffer + this->sendFrameBuffer.erase(this->sendFrameBuffer.begin()); + this->sendFrameArgs.erase(this->sendFrameArgs.begin()); + } + pthread_mutex_unlock(&this->lock); + + //Check time since previous frame send + struct timespec tp; + clock_gettime(CLOCK_MONOTONIC, &tp); + long int secSinceLastFrame = tp.tv_sec - this->lastFrameTime.tv_sec; + long int nsecSinceLastFrame = tp.tv_nsec - this->lastFrameTime.tv_nsec; + if(nsecSinceLastFrame < 0) + { + secSinceLastFrame -= 1; + nsecSinceLastFrame *= -1; + } - if(buff != NULL) + if(buff != NULL) + { + //Convert new frame to correct size and pixel format + assert(strcmp(args.pxFmt.c_str(), "RGB24")==0); + unsigned resizeBuffLen = this->outputWidth * this->outputHeight * 3; + char *buffResize = new char[resizeBuffLen]; + memset(buffResize, 0, resizeBuffLen); + for(unsigned x = 0; x < this->outputWidth; x++) { - //Convert new frame to correct size and pixel format - assert(strcmp(args.pxFmt.c_str(), "RGB24")==0); - unsigned resizeBuffLen = this->outputWidth * this->outputHeight * 3; - char *buffResize = new char[resizeBuffLen]; - memset(buffResize, 0, resizeBuffLen); - for(unsigned x = 0; x < this->outputWidth; x++) + if (x >= args.width) continue; + for(unsigned y = 0; y < this->outputHeight; y++) { - if (x >= args.width) continue; - for(unsigned y = 0; y < this->outputHeight; y++) - { - if (y >= args.height) continue; - buffResize[y * this->outputWidth * 3 + x * 3] = buff[y * args.width * 3 + x * 3]; - buffResize[y * this->outputWidth * 3 + x * 3 + 1] = buff[y * args.width * 3 + x * 3 + 1]; - buffResize[y * this->outputWidth * 3 + x * 3 + 2] = buff[y * args.width * 3 + x * 3 + 2]; - } + if (y >= args.height) continue; + buffResize[y * this->outputWidth * 3 + x * 3] = buff[y * args.width * 3 + x * 3]; + buffResize[y * this->outputWidth * 3 + x * 3 + 1] = buff[y * args.width * 3 + x * 3 + 1]; + buffResize[y * this->outputWidth * 3 + x * 3 + 2] = buff[y * args.width * 3 + x * 3 + 2]; } + } - unsigned char *buffOut = NULL; - unsigned buffOutLen = 0; - DecodeFrame((unsigned char *)buffResize, resizeBuffLen, - args.pxFmt.c_str(), - this->outputWidth, this->outputHeight, - this->outputPxFmt.c_str(), - &buffOut, - &buffOutLen); - - assert(buffOutLen == this->framesize); + unsigned char *buffOut = NULL; + unsigned buffOutLen = 0; + DecodeFrame((unsigned char *)buffResize, resizeBuffLen, + args.pxFmt.c_str(), + this->outputWidth, this->outputHeight, + this->outputPxFmt.c_str(), + &buffOut, + &buffOutLen); - //Replace current frame with new encoded frame - if(this->currentFrame!=NULL) - delete [] this->currentFrame; - this->currentFrame = buffOut; + assert(buffOutLen == this->framesize); - delete [] buffResize; + //Replace 
current frame with new encoded frame + if(this->currentFrame!=NULL) + delete [] this->currentFrame; + this->currentFrame = buffOut; - } + delete [] buffResize; - //If we have no data, initialise with a blank frame - if(this->currentFrame==NULL) - { - this->currentFrame = new unsigned char[this->framesize]; - memset(this->currentFrame, 0, this->framesize); - } + } - int timeElapsed = secSinceLastFrame>=1; + //If we have no data, initialise with a blank frame + if(this->currentFrame==NULL) + { + this->currentFrame = new unsigned char[this->framesize]; + memset(this->currentFrame, 0, this->framesize); + } - if(timeElapsed || buff != NULL) - { - //Send frame update due to time elapse - if(timeElapsed) - printf("Write frame due to elapse time\n"); - write(this->fdwr, this->currentFrame, this->framesize); + int timeElapsed = secSinceLastFrame>=1; - this->lastFrameTime = tp; - } + if(timeElapsed || buff != NULL) + { + //Send frame update due to time elapse + if(timeElapsed) + printf("Write frame due to elapse time\n"); + write(this->fdwr, this->currentFrame, this->framesize); - //Free image buffer - if(buff!=NULL) - delete [] buff; + this->lastFrameTime = tp; } -public: - void Run() - { - if(verbose) printf("Thread started: %s\n", this->devName.c_str()); - int running = 1; - pthread_mutex_lock(&this->lock); - this->stopped = 0; - pthread_mutex_unlock(&this->lock); - - this->fdwr = open(this->devName.c_str(), O_RDWR); - assert(fdwr >= 0); + //Free image buffer + if(buff!=NULL) + delete [] buff; +} - struct v4l2_capability vid_caps; - int ret_code = ioctl(this->fdwr, VIDIOC_QUERYCAP, &vid_caps); - assert(ret_code != -1); +void Video_out::Run() +{ + if(verbose) printf("Thread started: %s\n", this->devName.c_str()); + int running = 1; + pthread_mutex_lock(&this->lock); + this->stopped = 0; + pthread_mutex_unlock(&this->lock); + + this->fdwr = open(this->devName.c_str(), O_RDWR); + assert(fdwr >= 0); + + struct v4l2_capability vid_caps; + int ret_code = ioctl(this->fdwr, VIDIOC_QUERYCAP, &vid_caps); + assert(ret_code != -1); - struct v4l2_format vid_format; - memset(&vid_format, 0, sizeof(vid_format)); + struct v4l2_format vid_format; + memset(&vid_format, 0, sizeof(vid_format)); - ret_code = ioctl(this->fdwr, VIDIOC_G_FMT, &vid_format); - if(verbose)print_format(&vid_format); + ret_code = ioctl(this->fdwr, VIDIOC_G_FMT, &vid_format); + if(verbose)print_format(&vid_format); - int lw = 0; - int fw = 0; - if(strcmp(this->outputPxFmt.c_str(), "YVU420")==0) - { - lw = this->outputWidth; /* ??? */ - fw = ROUND_UP_4 (this->outputWidth) * ROUND_UP_2 (this->outputHeight); - fw += 2 * ((ROUND_UP_8 (this->outputWidth) / 2) * (ROUND_UP_2 (this->outputHeight) / 2)); - } - - if(strcmp(this->outputPxFmt.c_str(), "YUYV")==0 - || strcmp(this->outputPxFmt.c_str(), "UYVY")==0 ) - { - lw = (ROUND_UP_2 (this->outputWidth) * 2); - fw = lw * this->outputHeight; - } + int lw = 0; + int fw = 0; + if(strcmp(this->outputPxFmt.c_str(), "YVU420")==0) + { + lw = this->outputWidth; /* ??? 
*/ + fw = ROUND_UP_4 (this->outputWidth) * ROUND_UP_2 (this->outputHeight); + fw += 2 * ((ROUND_UP_8 (this->outputWidth) / 2) * (ROUND_UP_2 (this->outputHeight) / 2)); + } - vid_format.type = V4L2_BUF_TYPE_VIDEO_OUTPUT; - vid_format.fmt.pix.width = this->outputWidth; - vid_format.fmt.pix.height = this->outputHeight; - vid_format.fmt.pix.pixelformat = 0; - if(strcmp(this->outputPxFmt.c_str(), "YUYV")==0) - vid_format.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV; - if(strcmp(this->outputPxFmt.c_str(), "UYVY")==0) - vid_format.fmt.pix.pixelformat = V4L2_PIX_FMT_UYVY; - if(strcmp(this->outputPxFmt.c_str(), "YVU420")==0) - vid_format.fmt.pix.pixelformat = V4L2_PIX_FMT_YVU420; - if(strcmp(this->outputPxFmt.c_str(), "RGB24")==0) - vid_format.fmt.pix.pixelformat = V4L2_PIX_FMT_RGB24; + if(strcmp(this->outputPxFmt.c_str(), "YUYV")==0 + || strcmp(this->outputPxFmt.c_str(), "UYVY")==0 ) + { + lw = (ROUND_UP_2 (this->outputWidth) * 2); + fw = lw * this->outputHeight; + } - vid_format.fmt.pix.sizeimage = lw; - vid_format.fmt.pix.field = V4L2_FIELD_NONE; - vid_format.fmt.pix.bytesperline = fw; - vid_format.fmt.pix.colorspace = V4L2_COLORSPACE_SRGB; + vid_format.type = V4L2_BUF_TYPE_VIDEO_OUTPUT; + vid_format.fmt.pix.width = this->outputWidth; + vid_format.fmt.pix.height = this->outputHeight; + vid_format.fmt.pix.pixelformat = 0; + if(strcmp(this->outputPxFmt.c_str(), "YUYV")==0) + vid_format.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV; + if(strcmp(this->outputPxFmt.c_str(), "UYVY")==0) + vid_format.fmt.pix.pixelformat = V4L2_PIX_FMT_UYVY; + if(strcmp(this->outputPxFmt.c_str(), "YVU420")==0) + vid_format.fmt.pix.pixelformat = V4L2_PIX_FMT_YVU420; + if(strcmp(this->outputPxFmt.c_str(), "RGB24")==0) + vid_format.fmt.pix.pixelformat = V4L2_PIX_FMT_RGB24; - if(verbose)print_format(&vid_format); + vid_format.fmt.pix.sizeimage = lw; + vid_format.fmt.pix.field = V4L2_FIELD_NONE; + vid_format.fmt.pix.bytesperline = fw; + vid_format.fmt.pix.colorspace = V4L2_COLORSPACE_SRGB; - ret_code = ioctl(this->fdwr, VIDIOC_S_FMT, &vid_format); + if(verbose)print_format(&vid_format); - assert(ret_code != -1); + ret_code = ioctl(this->fdwr, VIDIOC_S_FMT, &vid_format); - this->framesize = vid_format.fmt.pix.sizeimage; - int linewidth = vid_format.fmt.pix.bytesperline; - if(verbose)printf("frame: format=%s\tsize=%d\n", this->outputPxFmt.c_str(), framesize); + assert(ret_code != -1); - try - { - while(running) - { - usleep(1000); + this->framesize = vid_format.fmt.pix.sizeimage; + int linewidth = vid_format.fmt.pix.bytesperline; + if(verbose)printf("frame: format=%s\tsize=%d\n", this->outputPxFmt.c_str(), framesize); - this->SendFrameInternal(); + try + { + while(running) + { + usleep(1000); - pthread_mutex_lock(&this->lock); - try - { + this->SendFrameInternal(); - running = !this->stop; - } - catch(std::exception &err) - { - if(verbose) printf("An exception has occured: %s\n", err.what()); - running = 0; - } - pthread_mutex_unlock(&this->lock); - } + pthread_mutex_lock(&this->lock); + try + { + running = !this->stop; } catch(std::exception &err) { if(verbose) printf("An exception has occured: %s\n", err.what()); + running = 0; } - - if(verbose) printf("Thread stopping\n"); - pthread_mutex_lock(&this->lock); - this->stopped = 1; - pthread_mutex_unlock(&this->lock); - } - - void SendFrame(const char *imgIn, unsigned imgLen, const char *pxFmt, int width, int height) - { - pthread_mutex_lock(&this->lock); - if(verbose) printf("SendFrame %i %s %i %i\n", imgLen, pxFmt, width, height); - - //Take a shallow copy of the buffer and keep for 
worker thread - char *buffCpy = new char[imgLen]; - memcpy(buffCpy, imgIn, imgLen); - this->sendFrameBuffer.push_back(buffCpy); - - class SendFrameArgs sendFrameArgsTmp; - sendFrameArgsTmp.imgLen = imgLen; - sendFrameArgsTmp.pxFmt = pxFmt; - sendFrameArgsTmp.width = width; - sendFrameArgsTmp.height = height; - this->sendFrameArgs.push_back(sendFrameArgsTmp); - pthread_mutex_unlock(&this->lock); } - - void Stop() - { - pthread_mutex_lock(&this->lock); - this->stop = 1; - pthread_mutex_unlock(&this->lock); } - - int WaitForStop() + catch(std::exception &err) { - this->Stop(); - while(1) - { - pthread_mutex_lock(&this->lock); - int s = this->stopped; - pthread_mutex_unlock(&this->lock); - - if(s) return 1; - usleep(10000); - } + if(verbose) printf("An exception has occured: %s\n", err.what()); } -}; -void *Video_out_manager_Worker_thread(void *arg) -{ - class Video_out *argobj = (class Video_out*) arg; - argobj->Run(); - - return NULL; + if(verbose) printf("Thread stopping\n"); + pthread_mutex_lock(&this->lock); + this->stopped = 1; + pthread_mutex_unlock(&this->lock); } -// ***************************************************************** - -int Video_out_manager_init(Video_out_manager *self, PyObject *args, - PyObject *kwargs) +void Video_out::SendFrame(const char *imgIn, unsigned imgLen, const char *pxFmt, int width, int height) { - self->threads = new std::map; - return 0; + pthread_mutex_lock(&this->lock); + if(verbose) printf("SendFrame %i %s %i %i\n", imgLen, pxFmt, width, height); + + //Take a shallow copy of the buffer and keep for worker thread + char *buffCpy = new char[imgLen]; + memcpy(buffCpy, imgIn, imgLen); + this->sendFrameBuffer.push_back(buffCpy); + + class SendFrameArgs sendFrameArgsTmp; + sendFrameArgsTmp.imgLen = imgLen; + sendFrameArgsTmp.pxFmt = pxFmt; + sendFrameArgsTmp.width = width; + sendFrameArgsTmp.height = height; + this->sendFrameArgs.push_back(sendFrameArgsTmp); + + pthread_mutex_unlock(&this->lock); } -void Video_out_manager_dealloc(Video_out_manager *self) +void Video_out::Stop() { - //Stop high level threads - for(std::map::iterator it = self->threads->begin(); - it != self->threads->end(); it++) - { - it->second->Stop(); - it->second->WaitForStop(); - } - - delete self->threads; - self->threads = NULL; - self->ob_type->tp_free((PyObject *)self); + pthread_mutex_lock(&this->lock); + this->stop = 1; + pthread_mutex_unlock(&this->lock); } -PyObject *Video_out_manager_open(Video_out_manager *self, PyObject *args) +int Video_out::WaitForStop() { - //Process arguments - const char *devarg = NULL; - const char *pxFmtIn = NULL; - int widthIn = 0; - int heightIn = 0; - - if(!PyArg_ParseTuple(args, "ssii", &devarg, &pxFmtIn, &widthIn, &heightIn)) + this->Stop(); + while(1) { - PyErr_Format(PyExc_RuntimeError, "Incorrect arguments to function."); - Py_RETURN_NONE; - } - - //Create worker thread - pthread_t thread; - Video_out *threadArgs = new Video_out(devarg); - (*self->threads)[devarg] = threadArgs; - threadArgs->self = self; - threadArgs->outputWidth = widthIn; - threadArgs->outputHeight = heightIn; - threadArgs->outputPxFmt = pxFmtIn; - - pthread_create(&thread, NULL, Video_out_manager_Worker_thread, threadArgs); - - Py_RETURN_NONE; -} - -PyObject *Video_out_manager_Send_frame(Video_out_manager *self, PyObject *args) -{ - //printf("Video_out_manager_Send_frame\n"); - //dev = '\\dev\\video0', img, pixel_format, width, height - - //Process arguments - const char *devarg = NULL; - const char *imgIn = NULL; - const char *pxFmtIn = NULL; - int widthIn = 0; - int 
heightIn = 0; - - if(PyObject_Length(args) < 5) - { - PyErr_Format(PyExc_RuntimeError, "Too few arguments."); - Py_RETURN_NONE; - } - - PyObject *pydev = PyTuple_GetItem(args, 0); - devarg = PyString_AsString(pydev); - - PyObject *pyimg = PyTuple_GetItem(args, 1); - imgIn = PyString_AsString(pyimg); - Py_ssize_t imgLen = PyObject_Length(pyimg); - - PyObject *pyPxFmt = PyTuple_GetItem(args, 2); - pxFmtIn = PyString_AsString(pyPxFmt); - - PyObject *pyWidth = PyTuple_GetItem(args, 3); - widthIn = PyInt_AsLong(pyWidth); - - PyObject *pyHeight = PyTuple_GetItem(args, 4); - heightIn = PyInt_AsLong(pyHeight); - - std::map::iterator it = self->threads->find(devarg); + pthread_mutex_lock(&this->lock); + int s = this->stopped; + pthread_mutex_unlock(&this->lock); - if(it != self->threads->end()) - { - it->second->SendFrame(imgIn, imgLen, pxFmtIn, widthIn, heightIn); - } - else - { - PyErr_Format(PyExc_RuntimeError, "Device not found."); - Py_RETURN_NONE; + if(s) return 1; + usleep(10000); } - - Py_RETURN_NONE; } -PyObject *Video_out_manager_close(Video_out_manager *self, PyObject *args) +void *Video_out_manager_Worker_thread(void *arg) { - //Process arguments - const char *devarg = "/dev/video0"; - if(PyTuple_Size(args) >= 1) - { - PyObject *pydevarg = PyTuple_GetItem(args, 0); - devarg = PyString_AsString(pydevarg); - } - - //Stop worker thread - std::map::iterator it = self->threads->find(devarg); - - if(it != self->threads->end()) - { - it->second->Stop(); - } + class Video_out *argobj = (class Video_out*) arg; + argobj->Run(); - Py_RETURN_NONE; + return NULL; } -PyObject *Video_out_manager_list_devices(Video_out_manager *self) -{ - PyObject *out = PyList_New(0); - const char dir[] = "/dev"; - DIR *dp; - struct dirent *dirp; - if((dp = opendir(dir)) == NULL) { - printf("Error(%d) opening %s\n", errno, dir); - Py_RETURN_NONE; - } - - while ((dirp = readdir(dp)) != NULL) { - if (strncmp(dirp->d_name, "video", 5) != 0) continue; - std::string tmp = "/dev/"; - tmp.append(dirp->d_name); - PyList_Append(out, PyString_FromString(tmp.c_str())); - } - closedir(dp); +// ***************************************************************** - PyList_Sort(out); - return out; -} diff --git a/v4l2out.h b/v4l2out.h index 709cf7c..759a177 100644 --- a/v4l2out.h +++ b/v4l2out.h @@ -1,30 +1,42 @@ #ifndef __V4L2OUT_H__ #define __V4L2OUT_H__ -#include #include #include #include -class Video_out_manager_cl{ +class Video_out +{ public: - PyObject_HEAD - std::map *threads; -}; -typedef Video_out_manager_cl Video_out_manager; - -int Video_out_manager_init(Video_out_manager *self, PyObject *args, - PyObject *kwargs); - -void Video_out_manager_dealloc(Video_out_manager *self); - -PyObject *Video_out_manager_open(Video_out_manager *self, PyObject *args); + std::string devName; + int stop; + int stopped; + pthread_mutex_t lock; + int verbose; + std::vector sendFrameArgs; + std::vector sendFrameBuffer; + struct timespec lastFrameTime; + int fdwr; + int framesize; + unsigned char *currentFrame; + int outputWidth; + int outputHeight; + std::string outputPxFmt; + + Video_out(const char *devNameIn); + virtual ~Video_out(); + +protected: + void SendFrameInternal(); -PyObject *Video_out_manager_Send_frame(Video_out_manager *self, PyObject *args); - -PyObject *Video_out_manager_close(Video_out_manager *self, PyObject *args); +public: + void Run(); + void SendFrame(const char *imgIn, unsigned imgLen, const char *pxFmt, int width, int height); + void Stop(); + int WaitForStop(); +}; -PyObject 
*Video_out_manager_list_devices(Video_out_manager *self); +void *Video_out_manager_Worker_thread(void *arg); // ****************************************************************** diff --git a/videoout.cpp b/videoout.cpp new file mode 100644 index 0000000..6372e30 --- /dev/null +++ b/videoout.cpp @@ -0,0 +1,146 @@ + +#include "videoout.h" +#include "v4l2out.h" +#include + +int Video_out_manager_init(Video_out_manager *self, PyObject *args, + PyObject *kwargs) +{ + self->threads = new std::map; + return 0; +} + +void Video_out_manager_dealloc(Video_out_manager *self) +{ + //Stop high level threads + for(std::map::iterator it = self->threads->begin(); + it != self->threads->end(); it++) + { + it->second->Stop(); + it->second->WaitForStop(); + } + + delete self->threads; + self->threads = NULL; + self->ob_type->tp_free((PyObject *)self); +} + +PyObject *Video_out_manager_open(Video_out_manager *self, PyObject *args) +{ + //Process arguments + const char *devarg = NULL; + const char *pxFmtIn = NULL; + int widthIn = 0; + int heightIn = 0; + + if(!PyArg_ParseTuple(args, "ssii", &devarg, &pxFmtIn, &widthIn, &heightIn)) + { + PyErr_Format(PyExc_RuntimeError, "Incorrect arguments to function."); + Py_RETURN_NONE; + } + + //Create worker thread + pthread_t thread; + Video_out *threadArgs = new Video_out(devarg); + (*self->threads)[devarg] = threadArgs; + threadArgs->outputWidth = widthIn; + threadArgs->outputHeight = heightIn; + threadArgs->outputPxFmt = pxFmtIn; + + pthread_create(&thread, NULL, Video_out_manager_Worker_thread, threadArgs); + + Py_RETURN_NONE; +} + +PyObject *Video_out_manager_Send_frame(Video_out_manager *self, PyObject *args) +{ + //printf("Video_out_manager_Send_frame\n"); + //dev = '\\dev\\video0', img, pixel_format, width, height + + //Process arguments + const char *devarg = NULL; + const char *imgIn = NULL; + const char *pxFmtIn = NULL; + int widthIn = 0; + int heightIn = 0; + + if(PyObject_Length(args) < 5) + { + PyErr_Format(PyExc_RuntimeError, "Too few arguments."); + Py_RETURN_NONE; + } + + PyObject *pydev = PyTuple_GetItem(args, 0); + devarg = PyString_AsString(pydev); + + PyObject *pyimg = PyTuple_GetItem(args, 1); + imgIn = PyString_AsString(pyimg); + Py_ssize_t imgLen = PyObject_Length(pyimg); + + PyObject *pyPxFmt = PyTuple_GetItem(args, 2); + pxFmtIn = PyString_AsString(pyPxFmt); + + PyObject *pyWidth = PyTuple_GetItem(args, 3); + widthIn = PyInt_AsLong(pyWidth); + + PyObject *pyHeight = PyTuple_GetItem(args, 4); + heightIn = PyInt_AsLong(pyHeight); + + std::map::iterator it = self->threads->find(devarg); + + if(it != self->threads->end()) + { + it->second->SendFrame(imgIn, imgLen, pxFmtIn, widthIn, heightIn); + } + else + { + PyErr_Format(PyExc_RuntimeError, "Device not found."); + Py_RETURN_NONE; + } + + Py_RETURN_NONE; +} + +PyObject *Video_out_manager_close(Video_out_manager *self, PyObject *args) +{ + //Process arguments + const char *devarg = "/dev/video0"; + if(PyTuple_Size(args) >= 1) + { + PyObject *pydevarg = PyTuple_GetItem(args, 0); + devarg = PyString_AsString(pydevarg); + } + + //Stop worker thread + std::map::iterator it = self->threads->find(devarg); + + if(it != self->threads->end()) + { + it->second->Stop(); + } + + Py_RETURN_NONE; +} + +PyObject *Video_out_manager_list_devices(Video_out_manager *self) +{ + PyObject *out = PyList_New(0); + const char dir[] = "/dev"; + DIR *dp; + struct dirent *dirp; + if((dp = opendir(dir)) == NULL) { + printf("Error(%d) opening %s\n", errno, dir); + Py_RETURN_NONE; + } + + while ((dirp = readdir(dp)) != NULL) { + 
if (strncmp(dirp->d_name, "video", 5) != 0) continue; + std::string tmp = "/dev/"; + tmp.append(dirp->d_name); + PyList_Append(out, PyString_FromString(tmp.c_str())); + } + closedir(dp); + + PyList_Sort(out); + return out; +} diff --git a/videoout.h b/videoout.h new file mode 100644 index 0000000..bca4d8e --- /dev/null +++ b/videoout.h @@ -0,0 +1,27 @@ + +#ifndef VIDEOOUT_H +#define VIDEOOUT_H + +#include +#include +#include + +class Video_out_manager_cl{ +public: + PyObject_HEAD + std::map *threads; +}; +typedef Video_out_manager_cl Video_out_manager; + +int Video_out_manager_init(Video_out_manager *self, PyObject *args, + PyObject *kwargs); + +void Video_out_manager_dealloc(Video_out_manager *self); + +PyObject *Video_out_manager_open(Video_out_manager *self, PyObject *args); +PyObject *Video_out_manager_Send_frame(Video_out_manager *self, PyObject *args); +PyObject *Video_out_manager_close(Video_out_manager *self, PyObject *args); +PyObject *Video_out_manager_list_devices(Video_out_manager *self); + +#endif //VIDEOOUT_H + From 1c3d870ab7d94c264fad34f968043c4ec69a1658 Mon Sep 17 00:00:00 2001 From: Tim Sheerman-Chase Date: Fri, 15 Nov 2013 11:38:03 +0000 Subject: [PATCH 081/256] Refactor video out --- libvideolive.cpp | 23 +++++------------------ v4l2capture.cpp | 23 +++++++++++++++++++++++ v4l2capture.h | 2 ++ v4l2out.cpp | 25 +++++++++++++++++++++++++ v4l2out.h | 2 ++ videoout.cpp | 20 ++++---------------- 6 files changed, 61 insertions(+), 34 deletions(-) diff --git a/libvideolive.cpp b/libvideolive.cpp index 7b1356f..f3bc2ac 100644 --- a/libvideolive.cpp +++ b/libvideolive.cpp @@ -7,7 +7,6 @@ // See README for license #include -#include #include #include #include @@ -15,7 +14,6 @@ #include #include #include -#include #include "pixfmt.h" #include "v4l2capture.h" #include "videoout.h" @@ -292,24 +290,13 @@ static PyObject *Device_manager_close(Device_manager *self, PyObject *args) } static PyObject *Device_manager_list_devices(Device_manager *self) -{ +{ PyObject *out = PyList_New(0); - const char dir[] = "/dev"; - DIR *dp; - struct dirent *dirp; - if((dp = opendir(dir)) == NULL) { - printf("Error(%d) opening %s\n", errno, dir); - Py_RETURN_NONE; - } - - while ((dirp = readdir(dp)) != NULL) { - if (strncmp(dirp->d_name, "video", 5) != 0) continue; - std::string tmp = "/dev/"; - tmp.append(dirp->d_name); - PyList_Append(out, PyString_FromString(tmp.c_str())); + std::vector devLi = List_in_devices(); + for(unsigned i=0; i List_in_devices() +{ + std::vector out; + const char dir[] = "/dev"; + DIR *dp; + struct dirent *dirp; + if((dp = opendir(dir)) == NULL) { + printf("Error(%d) opening %s\n", errno, dir); + return out; + } + + while ((dirp = readdir(dp)) != NULL) { + if (strncmp(dirp->d_name, "video", 5) != 0) continue; + std::string tmp = "/dev/"; + tmp.append(dirp->d_name); + out.push_back(tmp); + } + closedir(dp); + return out; +} + + + diff --git a/v4l2capture.h b/v4l2capture.h index a1be72c..ac725ec 100644 --- a/v4l2capture.h +++ b/v4l2capture.h @@ -124,6 +124,8 @@ class Video_in_Manager : public Base_Video_In void *Video_in_Worker_thread(void *arg); +std::vector List_in_devices(); + // ********************************************************************** #endif //V4L2CAPTURE_H diff --git a/v4l2out.cpp b/v4l2out.cpp index 11a1bc8..e1e8ef3 100644 --- a/v4l2out.cpp +++ b/v4l2out.cpp @@ -13,6 +13,8 @@ #include #include #include +#include +#include #include #include "v4l2out.h" #include "pixfmt.h" @@ -341,3 +343,26 @@ void *Video_out_manager_Worker_thread(void *arg) // 
***************************************************************** +std::vector List_out_devices() +{ + std::vector out; + const char dir[] = "/dev"; + DIR *dp; + struct dirent *dirp; + if((dp = opendir(dir)) == NULL) { + printf("Error(%d) opening %s\n", errno, dir); + return out; + } + + while ((dirp = readdir(dp)) != NULL) { + if (strncmp(dirp->d_name, "video", 5) != 0) continue; + std::string tmp = "/dev/"; + tmp.append(dirp->d_name); + out.push_back(tmp); + } + closedir(dp); + return out; +} + + + diff --git a/v4l2out.h b/v4l2out.h index 759a177..6961f96 100644 --- a/v4l2out.h +++ b/v4l2out.h @@ -38,6 +38,8 @@ class Video_out void *Video_out_manager_Worker_thread(void *arg); +std::vector List_out_devices(); + // ****************************************************************** #endif //__V4L2OUT_H__ diff --git a/videoout.cpp b/videoout.cpp index 6372e30..9582f78 100644 --- a/videoout.cpp +++ b/videoout.cpp @@ -1,7 +1,6 @@ #include "videoout.h" #include "v4l2out.h" -#include int Video_out_manager_init(Video_out_manager *self, PyObject *args, PyObject *kwargs) @@ -125,22 +124,11 @@ PyObject *Video_out_manager_close(Video_out_manager *self, PyObject *args) PyObject *Video_out_manager_list_devices(Video_out_manager *self) { PyObject *out = PyList_New(0); - const char dir[] = "/dev"; - DIR *dp; - struct dirent *dirp; - if((dp = opendir(dir)) == NULL) { - printf("Error(%d) opening %s\n", errno, dir); - Py_RETURN_NONE; - } - - while ((dirp = readdir(dp)) != NULL) { - if (strncmp(dirp->d_name, "video", 5) != 0) continue; - std::string tmp = "/dev/"; - tmp.append(dirp->d_name); - PyList_Append(out, PyString_FromString(tmp.c_str())); + std::vector devLi = List_out_devices(); + for(unsigned i=0; i Date: Fri, 15 Nov 2013 11:39:42 +0000 Subject: [PATCH 082/256] Rename video in manager --- libvideolive.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/libvideolive.cpp b/libvideolive.cpp index f3bc2ac..146930f 100644 --- a/libvideolive.cpp +++ b/libvideolive.cpp @@ -398,7 +398,7 @@ PyMODINIT_FUNC initvideolive(void) } Py_INCREF(&Device_manager_type); - PyModule_AddObject(module, "Device_manager", (PyObject *)&Device_manager_type); + PyModule_AddObject(module, "Video_in_manager", (PyObject *)&Device_manager_type); PyModule_AddObject(module, "Video_out_manager", (PyObject *)&Video_out_manager_type); } From 1121f83aea2345cd49cd7f6fa85c05713a608f9f Mon Sep 17 00:00:00 2001 From: Tim Sheerman-Chase Date: Fri, 15 Nov 2013 11:51:36 +0000 Subject: [PATCH 083/256] Video out code defined in terms of abstract class --- base.h | 14 ++++++++++++-- libvideolive.cpp | 1 - v4l2capture.cpp | 2 +- v4l2out.cpp | 6 +++--- v4l2out.h | 3 ++- videoout.cpp | 8 ++++---- videoout.h | 3 ++- 7 files changed, 24 insertions(+), 13 deletions(-) diff --git a/base.h b/base.h index 9948f1e..a6f091b 100644 --- a/base.h +++ b/base.h @@ -44,11 +44,11 @@ class FrameMetaData }; -// ********************************************************************** - class Base_Video_In { public: + Base_Video_In() {}; + virtual ~Base_Video_In() {}; virtual void Stop() {}; virtual void WaitForStop() {}; virtual void OpenDevice() {}; @@ -59,5 +59,15 @@ class Base_Video_In virtual int GetFrame(unsigned char **buffOut, class FrameMetaData *metaOut) {return 0;}; }; +// ********************************************************************** + +class Base_Video_Out +{ +public: + virtual void SendFrame(const char *imgIn, unsigned imgLen, const char *pxFmt, int width, int height) {}; + virtual void Stop() {}; + virtual int WaitForStop() 
{return 1;};
+};
+
 #endif //BASE_H
 
diff --git a/libvideolive.cpp b/libvideolive.cpp
index 146930f..57d63c0 100644
--- a/libvideolive.cpp
+++ b/libvideolive.cpp
@@ -128,7 +128,6 @@ static PyObject *Device_manager_open(Device_manager *self, PyObject *args)
 	Py_RETURN_NONE;
 }
 
-
 static PyObject *Device_manager_set_format(Device_manager *self, PyObject *args)
 {
 	int size_x;
diff --git a/v4l2capture.cpp b/v4l2capture.cpp
index 02e3c32..1386d4d 100644
--- a/v4l2capture.cpp
+++ b/v4l2capture.cpp
@@ -84,7 +84,7 @@ int my_ioctl(int fd, int request, void *arg, int utimeout = -1)
 
 // **************************************************************************
 
-Video_in_Manager::Video_in_Manager(const char *devNameIn)
+Video_in_Manager::Video_in_Manager(const char *devNameIn) : Base_Video_In()
 {
 	stop = 0;
 	stopped = 1;
diff --git a/v4l2out.cpp b/v4l2out.cpp
index e1e8ef3..ca8a592 100644
--- a/v4l2out.cpp
+++ b/v4l2out.cpp
@@ -34,8 +34,6 @@ void print_format(struct v4l2_format*vid_format) {
 	printf("	vid_format->fmt.pix.colorspace =%d\n", vid_format->fmt.pix.colorspace );
 }
 
-//*******************************************************************
-
 class SendFrameArgs
 {
 public:
@@ -66,7 +64,9 @@ class SendFrameArgs
 	}
 };
 
-Video_out::Video_out(const char *devNameIn)
+//*******************************************************************
+
+Video_out::Video_out(const char *devNameIn) : Base_Video_Out()
 {
 	this->fdwr = 0;
 	framesize = 0;
diff --git a/v4l2out.h b/v4l2out.h
index 6961f96..841daa8 100644
--- a/v4l2out.h
+++ b/v4l2out.h
@@ -4,8 +4,9 @@
 #include
 #include
 #include
+#include "base.h"
 
-class Video_out
+class Video_out : public Base_Video_Out
 {
 public:
 	std::string devName;
diff --git a/videoout.cpp b/videoout.cpp
index 9582f78..7d06159 100644
--- a/videoout.cpp
+++ b/videoout.cpp
@@ -5,14 +5,14 @@
 int Video_out_manager_init(Video_out_manager *self, PyObject *args,
 		PyObject *kwargs)
 {
-	self->threads = new std::map<std::string, class Video_out*>;
+	self->threads = new std::map<std::string, class Base_Video_Out*>;
 	return 0;
 }
 
 void Video_out_manager_dealloc(Video_out_manager *self)
 {
 	//Stop high level threads
-	for(std::map<std::string, class Video_out*>::iterator it = self->threads->begin();
+	for(std::map<std::string, class Base_Video_Out*>::iterator it = self->threads->begin();
 		it != self->threads->end(); it++)
 	{
 		it->second->Stop();
@@ -85,7 +85,7 @@ PyObject *Video_out_manager_Send_frame(Video_out_manager *self, PyObject *args)
 	PyObject *pyHeight = PyTuple_GetItem(args, 4);
 	heightIn = PyInt_AsLong(pyHeight);
 
-	std::map<std::string, class Video_out*>::iterator it = self->threads->find(devarg);
+	std::map<std::string, class Base_Video_Out*>::iterator it = self->threads->find(devarg);
 
 	if(it != self->threads->end())
 	{
@@ -111,7 +111,7 @@ PyObject *Video_out_manager_close(Video_out_manager *self, PyObject *args)
 	}
 
 	//Stop worker thread
-	std::map<std::string, class Video_out*>::iterator it = self->threads->find(devarg);
+	std::map<std::string, class Base_Video_Out*>::iterator it = self->threads->find(devarg);
 
 	if(it != self->threads->end())
 	{
diff --git a/videoout.h b/videoout.h
index bca4d8e..2713724 100644
--- a/videoout.h
+++ b/videoout.h
@@ -5,11 +5,12 @@
 #include
 #include
 #include
+#include "base.h"
 
 class Video_out_manager_cl{
 public:
 	PyObject_HEAD
-	std::map<std::string, class Video_out*> *threads;
+	std::map<std::string, class Base_Video_Out*> *threads;
 };
 typedef Video_out_manager_cl Video_out_manager;
 
From b5cc35f10cda3657b8bb50281b0856c1cc447c44 Mon Sep 17 00:00:00 2001
From: Tim Sheerman-Chase
Date: Fri, 15 Nov 2013 12:00:46 +0000
Subject: [PATCH 084/256] Move high level capture code to separate files

---
 libvideolive.cpp | 244 +----------------------------------------------
 setup.py | 2 +-
 videoin.cpp | 226 +++++++++++++++++++++++++++++++++++++++
 videoin.h | 38 ++++++++
 4 files changed, 266 insertions(+), 244 deletions(-)
 create
mode 100644 videoin.cpp create mode 100644 videoin.h diff --git a/libvideolive.cpp b/libvideolive.cpp index 57d63c0..95349fe 100644 --- a/libvideolive.cpp +++ b/libvideolive.cpp @@ -15,25 +15,8 @@ #include #include #include "pixfmt.h" -#include "v4l2capture.h" #include "videoout.h" - -typedef struct { - PyObject_HEAD - int fd; - struct buffer *buffers; - int buffer_count; -} Video_device; - -class Device_manager_cl{ -public: - PyObject_HEAD - std::map *threadArgStore; -}; -typedef Device_manager_cl Device_manager; - -static PyObject *Device_manager_stop(Device_manager *self, PyObject *args); -static PyObject *Device_manager_close(Device_manager *self, PyObject *args); +#include "videoin.h" // ********************************************************************* @@ -75,231 +58,6 @@ PyObject *InsertHuffmanTable(PyObject *self, PyObject *args) return outBufferPy; } -// ********************************************************************** - -static void Device_manager_dealloc(Device_manager *self) -{ - //Stop high level threads - for(std::map::iterator it = self->threadArgStore->begin(); - it != self->threadArgStore->end(); it++) - { - PyObject *args = PyTuple_New(1); - PyTuple_SetItem(args, 0, PyString_FromString(it->first.c_str())); - Device_manager_stop(self, args); - Py_DECREF(args); - } - - delete self->threadArgStore; - self->ob_type->tp_free((PyObject *)self); -} - -static int Device_manager_init(Device_manager *self, PyObject *args, - PyObject *kwargs) -{ - self->threadArgStore = new std::map; - return 0; -} - -static PyObject *Device_manager_open(Device_manager *self, PyObject *args) -{ - //Process arguments - const char *devarg = "/dev/video0"; - if(PyTuple_Size(args) >= 1) - { - PyObject *pydevarg = PyTuple_GetItem(args, 0); - devarg = PyString_AsString(pydevarg); - } - - //Check this device has not already been opened - std::map::iterator it = self->threadArgStore->find(devarg); - if(it!=self->threadArgStore->end()) - { - PyErr_Format(PyExc_RuntimeError, "Device already opened."); - Py_RETURN_NONE; - } - - pthread_t thread; - Video_in_Manager *threadArgs = new Video_in_Manager(devarg); - (*self->threadArgStore)[devarg] = threadArgs; - pthread_create(&thread, NULL, Video_in_Worker_thread, threadArgs); - - threadArgs->OpenDevice(); - - Py_RETURN_NONE; -} - -static PyObject *Device_manager_set_format(Device_manager *self, PyObject *args) -{ - int size_x; - int size_y; - const char *fmt = NULL; - const char *devarg = NULL; - - if(!PyArg_ParseTuple(args, "sii|s", &devarg, &size_x, &size_y, &fmt)) - { - Py_RETURN_NONE; - } - - //Check this device is valid - std::map::iterator it = self->threadArgStore->find(devarg); - if(it==self->threadArgStore->end()) - { - PyErr_Format(PyExc_RuntimeError, "Device already not ready."); - Py_RETURN_NONE; - } - - class Base_Video_In *threadArgs = (*self->threadArgStore)[devarg]; - threadArgs->SetFormat(fmt, size_x, size_y); - - Py_RETURN_NONE; -} - -static PyObject *Device_manager_Start(Device_manager *self, PyObject *args) -{ - - //Process arguments - const char *devarg = "/dev/video0"; - if(PyTuple_Size(args) >= 1) - { - PyObject *pydevarg = PyTuple_GetItem(args, 0); - devarg = PyString_AsString(pydevarg); - } - - long buffer_count = 10; - if(PyTuple_Size(args) >= 4) - { - PyObject *pybufferarg = PyTuple_GetItem(args, 4); - buffer_count = PyInt_AsLong(pybufferarg); - } - - //Check this device is valid - std::map::iterator it = self->threadArgStore->find(devarg); - if(it==self->threadArgStore->end()) - { - PyErr_Format(PyExc_RuntimeError, "Device 
already not ready."); - Py_RETURN_NONE; - } - - class Base_Video_In *threadArgs = (*self->threadArgStore)[devarg]; - threadArgs->StartDevice(buffer_count); - - Py_RETURN_NONE; -} - -static PyObject *Device_manager_Get_frame(Device_manager *self, PyObject *args) -{ - - //Process arguments - const char *devarg = "/dev/video0"; - if(PyTuple_Size(args) >= 1) - { - PyObject *pydevarg = PyTuple_GetItem(args, 0); - devarg = PyString_AsString(pydevarg); - } - - //Check this device is valid - std::map::iterator it = self->threadArgStore->find(devarg); - if(it==self->threadArgStore->end()) - { - PyErr_Format(PyExc_RuntimeError, "Device already not ready."); - Py_RETURN_NONE; - } - - class Base_Video_In *threadArgs = (*self->threadArgStore)[devarg]; - unsigned char *buffOut = NULL; - class FrameMetaData metaOut; - - int ok = threadArgs->GetFrame(&buffOut, &metaOut); - if(ok && buffOut != NULL) - { - //Format output to python - PyObject *pymeta = PyDict_New(); - PyDict_SetItemString(pymeta, "width", PyInt_FromLong(metaOut.width)); - PyDict_SetItemString(pymeta, "height", PyInt_FromLong(metaOut.height)); - PyDict_SetItemString(pymeta, "format", PyString_FromString(metaOut.fmt.c_str())); - PyDict_SetItemString(pymeta, "sequence", PyInt_FromLong(metaOut.sequence)); - PyDict_SetItemString(pymeta, "tv_sec", PyInt_FromLong(metaOut.tv_sec)); - PyDict_SetItemString(pymeta, "tv_usec", PyInt_FromLong(metaOut.tv_usec)); - - PyObject *out = PyTuple_New(2); - PyTuple_SetItem(out, 0, PyByteArray_FromStringAndSize((char *)buffOut, metaOut.buffLen)); - PyTuple_SetItem(out, 1, pymeta); - - delete [] buffOut; - return out; - } - - Py_RETURN_NONE; -} - -static PyObject *Device_manager_stop(Device_manager *self, PyObject *args) -{ - //Process arguments - const char *devarg = "/dev/video0"; - if(PyTuple_Size(args) >= 1) - { - PyObject *pydevarg = PyTuple_GetItem(args, 0); - devarg = PyString_AsString(pydevarg); - } - - //Check this device is valid - std::map::iterator it = self->threadArgStore->find(devarg); - if(it==self->threadArgStore->end()) - { - PyErr_Format(PyExc_RuntimeError, "Device already not ready."); - Py_RETURN_NONE; - } - - class Base_Video_In *threadArgs = (*self->threadArgStore)[devarg]; - threadArgs->StopDevice(); - - Py_RETURN_NONE; -} - -static PyObject *Device_manager_close(Device_manager *self, PyObject *args) -{ - //Process arguments - const char *devarg = "/dev/video0"; - if(PyTuple_Size(args) >= 1) - { - PyObject *pydevarg = PyTuple_GetItem(args, 0); - devarg = PyString_AsString(pydevarg); - } - - //Check this device is valid - std::map::iterator it = self->threadArgStore->find(devarg); - if(it==self->threadArgStore->end()) - { - PyErr_Format(PyExc_RuntimeError, "Device already not ready."); - Py_RETURN_NONE; - } - - class Base_Video_In *threadArgs = (*self->threadArgStore)[devarg]; - threadArgs->CloseDevice(); - - //Stop worker thread - threadArgs->Stop(); - - //Release memeory - threadArgs->WaitForStop(); - delete threadArgs; - self->threadArgStore->erase(devarg); - - Py_RETURN_NONE; -} - -static PyObject *Device_manager_list_devices(Device_manager *self) -{ - PyObject *out = PyList_New(0); - std::vector devLi = List_in_devices(); - for(unsigned i=0; i::iterator it = self->threadArgStore->begin(); + it != self->threadArgStore->end(); it++) + { + PyObject *args = PyTuple_New(1); + PyTuple_SetItem(args, 0, PyString_FromString(it->first.c_str())); + Device_manager_stop(self, args); + Py_DECREF(args); + } + + delete self->threadArgStore; + self->ob_type->tp_free((PyObject *)self); +} + +int 
Device_manager_init(Device_manager *self, PyObject *args, + PyObject *kwargs) +{ + self->threadArgStore = new std::map; + return 0; +} + +PyObject *Device_manager_open(Device_manager *self, PyObject *args) +{ + //Process arguments + const char *devarg = "/dev/video0"; + if(PyTuple_Size(args) >= 1) + { + PyObject *pydevarg = PyTuple_GetItem(args, 0); + devarg = PyString_AsString(pydevarg); + } + + //Check this device has not already been opened + std::map::iterator it = self->threadArgStore->find(devarg); + if(it!=self->threadArgStore->end()) + { + PyErr_Format(PyExc_RuntimeError, "Device already opened."); + Py_RETURN_NONE; + } + + pthread_t thread; + Video_in_Manager *threadArgs = new Video_in_Manager(devarg); + (*self->threadArgStore)[devarg] = threadArgs; + pthread_create(&thread, NULL, Video_in_Worker_thread, threadArgs); + + threadArgs->OpenDevice(); + + Py_RETURN_NONE; +} + +PyObject *Device_manager_set_format(Device_manager *self, PyObject *args) +{ + int size_x; + int size_y; + const char *fmt = NULL; + const char *devarg = NULL; + + if(!PyArg_ParseTuple(args, "sii|s", &devarg, &size_x, &size_y, &fmt)) + { + Py_RETURN_NONE; + } + + //Check this device is valid + std::map::iterator it = self->threadArgStore->find(devarg); + if(it==self->threadArgStore->end()) + { + PyErr_Format(PyExc_RuntimeError, "Device already not ready."); + Py_RETURN_NONE; + } + + class Base_Video_In *threadArgs = (*self->threadArgStore)[devarg]; + threadArgs->SetFormat(fmt, size_x, size_y); + + Py_RETURN_NONE; +} + +PyObject *Device_manager_Start(Device_manager *self, PyObject *args) +{ + + //Process arguments + const char *devarg = "/dev/video0"; + if(PyTuple_Size(args) >= 1) + { + PyObject *pydevarg = PyTuple_GetItem(args, 0); + devarg = PyString_AsString(pydevarg); + } + + long buffer_count = 10; + if(PyTuple_Size(args) >= 4) + { + PyObject *pybufferarg = PyTuple_GetItem(args, 4); + buffer_count = PyInt_AsLong(pybufferarg); + } + + //Check this device is valid + std::map::iterator it = self->threadArgStore->find(devarg); + if(it==self->threadArgStore->end()) + { + PyErr_Format(PyExc_RuntimeError, "Device already not ready."); + Py_RETURN_NONE; + } + + class Base_Video_In *threadArgs = (*self->threadArgStore)[devarg]; + threadArgs->StartDevice(buffer_count); + + Py_RETURN_NONE; +} + +PyObject *Device_manager_Get_frame(Device_manager *self, PyObject *args) +{ + + //Process arguments + const char *devarg = "/dev/video0"; + if(PyTuple_Size(args) >= 1) + { + PyObject *pydevarg = PyTuple_GetItem(args, 0); + devarg = PyString_AsString(pydevarg); + } + + //Check this device is valid + std::map::iterator it = self->threadArgStore->find(devarg); + if(it==self->threadArgStore->end()) + { + PyErr_Format(PyExc_RuntimeError, "Device already not ready."); + Py_RETURN_NONE; + } + + class Base_Video_In *threadArgs = (*self->threadArgStore)[devarg]; + unsigned char *buffOut = NULL; + class FrameMetaData metaOut; + + int ok = threadArgs->GetFrame(&buffOut, &metaOut); + if(ok && buffOut != NULL) + { + //Format output to python + PyObject *pymeta = PyDict_New(); + PyDict_SetItemString(pymeta, "width", PyInt_FromLong(metaOut.width)); + PyDict_SetItemString(pymeta, "height", PyInt_FromLong(metaOut.height)); + PyDict_SetItemString(pymeta, "format", PyString_FromString(metaOut.fmt.c_str())); + PyDict_SetItemString(pymeta, "sequence", PyInt_FromLong(metaOut.sequence)); + PyDict_SetItemString(pymeta, "tv_sec", PyInt_FromLong(metaOut.tv_sec)); + PyDict_SetItemString(pymeta, "tv_usec", PyInt_FromLong(metaOut.tv_usec)); + + PyObject 
*out = PyTuple_New(2); + PyTuple_SetItem(out, 0, PyByteArray_FromStringAndSize((char *)buffOut, metaOut.buffLen)); + PyTuple_SetItem(out, 1, pymeta); + + delete [] buffOut; + return out; + } + + Py_RETURN_NONE; +} + +PyObject *Device_manager_stop(Device_manager *self, PyObject *args) +{ + //Process arguments + const char *devarg = "/dev/video0"; + if(PyTuple_Size(args) >= 1) + { + PyObject *pydevarg = PyTuple_GetItem(args, 0); + devarg = PyString_AsString(pydevarg); + } + + //Check this device is valid + std::map::iterator it = self->threadArgStore->find(devarg); + if(it==self->threadArgStore->end()) + { + PyErr_Format(PyExc_RuntimeError, "Device already not ready."); + Py_RETURN_NONE; + } + + class Base_Video_In *threadArgs = (*self->threadArgStore)[devarg]; + threadArgs->StopDevice(); + + Py_RETURN_NONE; +} + +PyObject *Device_manager_close(Device_manager *self, PyObject *args) +{ + //Process arguments + const char *devarg = "/dev/video0"; + if(PyTuple_Size(args) >= 1) + { + PyObject *pydevarg = PyTuple_GetItem(args, 0); + devarg = PyString_AsString(pydevarg); + } + + //Check this device is valid + std::map::iterator it = self->threadArgStore->find(devarg); + if(it==self->threadArgStore->end()) + { + PyErr_Format(PyExc_RuntimeError, "Device already not ready."); + Py_RETURN_NONE; + } + + class Base_Video_In *threadArgs = (*self->threadArgStore)[devarg]; + threadArgs->CloseDevice(); + + //Stop worker thread + threadArgs->Stop(); + + //Release memeory + threadArgs->WaitForStop(); + delete threadArgs; + self->threadArgStore->erase(devarg); + + Py_RETURN_NONE; +} + +PyObject *Device_manager_list_devices(Device_manager *self) +{ + PyObject *out = PyList_New(0); + std::vector devLi = List_in_devices(); + for(unsigned i=0; i +#include "base.h" +#include +#include + +typedef struct { + PyObject_HEAD + int fd; + struct buffer *buffers; + int buffer_count; +} Video_device; + +class Device_manager_cl{ +public: + PyObject_HEAD + std::map *threadArgStore; +}; +typedef Device_manager_cl Device_manager; + +int Device_manager_init(Device_manager *self, PyObject *args, + PyObject *kwargs); +void Device_manager_dealloc(Device_manager *self); +PyObject *Device_manager_open(Device_manager *self, PyObject *args); +PyObject *Device_manager_set_format(Device_manager *self, PyObject *args); +PyObject *Device_manager_Start(Device_manager *self, PyObject *args); +PyObject *Device_manager_Get_frame(Device_manager *self, PyObject *args); +PyObject *Device_manager_stop(Device_manager *self, PyObject *args); +PyObject *Device_manager_close(Device_manager *self, PyObject *args); +PyObject *Device_manager_list_devices(Device_manager *self); + + + +#endif //VIDEOIN_H + From 77311f3b9a06e25a9e96fd56b7a309eeec232d46 Mon Sep 17 00:00:00 2001 From: Tim Sheerman-Chase Date: Fri, 15 Nov 2013 12:04:13 +0000 Subject: [PATCH 085/256] Update readme --- README | 28 +++++++--------------------- 1 file changed, 7 insertions(+), 21 deletions(-) diff --git a/README b/README index d52c42d..3a1c763 100644 --- a/README +++ b/README @@ -1,5 +1,5 @@ -python-v4l2capture 1.4.x -Python extension to capture video with video4linux2 +libvideolive +Capture and stream video in python 2009, 2010, 2011 Fredrik Portstrom 2011 Joakim Gebart @@ -10,9 +10,8 @@ This software may be used and redistributed under the terms of the GPL v2 (or la Introduction ============ -python-v4l2capture is a slim and easy to use Python extension for -capturing video with video4linux2. It supports libv4l to convert any -image format to RGB or YUV420. 
+libvideolive is a slim and easy to use Python extension for +capturing and streaming video. this fork of python-v4l2capture: https://github.com/gebart/python-v4l2capture @@ -23,34 +22,21 @@ libv4l: http://freshmeat.net/projects/libv4l Installation ============ -v4l2capture requires libv4l by default. You can compile v4l2capture -without libv4l, but that reduces image format support to YUYV input -and RGB output only. You can do so by erasing ', libraries = ["v4l2"]' +libvideolive on linux requires libv4l by default. You can do so by erasing ', libraries = ["v4l2"]' in setup.py and erasing '#define USE_LIBV4L' in v4l2capture.c. -python-v4l2capture uses distutils. +libvideolive uses distutils. To build: ./setup.py build To build and install: ./setup.py install Example ======= -See capture_picture.py, capture_picture_delayed.py and list_devices.py. +To do Change log ========== (see git log for latest changes) -1.4 (2011-03-18) - Added support for YUV420 output. - -1.3 (2010-07-21) - Added set of capabilities to the return value of - get_info. Updated list_devices.py. - -1.2 (2010-04-01) - Forked example script into capture_picture.py and - capture_picture_delayed.py. - -1.1 (2009-11-03) - Updated URL and documentation. - -1.0 (2009-02-28) - Initial release. From 5a598f36440febc71a4f7583040148356ead4d6a Mon Sep 17 00:00:00 2001 From: TimSC Date: Fri, 15 Nov 2013 17:32:13 +0000 Subject: [PATCH 086/256] Create files for windows implementation --- base.h | 4 ++++ mfvideoin.cpp | 2 ++ mfvideoin.h | 23 +++++++++++++++++++++++ mfvideoout.cpp | 2 ++ mfvideoout.h | 19 +++++++++++++++++++ setup.py | 16 +++++++++++++--- 6 files changed, 63 insertions(+), 3 deletions(-) create mode 100644 mfvideoin.cpp create mode 100644 mfvideoin.h create mode 100644 mfvideoout.cpp create mode 100644 mfvideoout.h diff --git a/base.h b/base.h index a6f091b..31e5a11 100644 --- a/base.h +++ b/base.h @@ -49,6 +49,7 @@ class Base_Video_In public: Base_Video_In() {}; virtual ~Base_Video_In() {}; + virtual void Stop() {}; virtual void WaitForStop() {}; virtual void OpenDevice() {}; @@ -64,6 +65,9 @@ class Base_Video_In class Base_Video_Out { public: + Base_Video_Out() {}; + virtual ~Base_Video_Out() {}; + virtual void SendFrame(const char *imgIn, unsigned imgLen, const char *pxFmt, int width, int height) {}; virtual void Stop() {}; virtual int WaitForStop() {return 1;}; diff --git a/mfvideoin.cpp b/mfvideoin.cpp new file mode 100644 index 0000000..1c950f0 --- /dev/null +++ b/mfvideoin.cpp @@ -0,0 +1,2 @@ + +#include "mfvideoin.h" diff --git a/mfvideoin.h b/mfvideoin.h new file mode 100644 index 0000000..ba7f3c4 --- /dev/null +++ b/mfvideoin.h @@ -0,0 +1,23 @@ + +#ifndef MFVIDEOIN_H +#define MFVIDEOIN_H + +#include "base.h" + +class MfVideoIn : public Base_Video_In +{ +public: + MfVideoIn() : Base_Video_In() {}; + virtual ~MfVideoIn() {}; + + virtual void Stop() {}; + virtual void WaitForStop() {}; + virtual void OpenDevice() {}; + virtual void SetFormat(const char *fmt, int width, int height) {}; + virtual void StartDevice(int buffer_count) {}; + virtual void StopDevice() {}; + virtual void CloseDevice() {}; + virtual int GetFrame(unsigned char **buffOut, class FrameMetaData *metaOut) {return 0;}; +}; + +#endif //MFVIDEOIN_H diff --git a/mfvideoout.cpp b/mfvideoout.cpp new file mode 100644 index 0000000..7d471c2 --- /dev/null +++ b/mfvideoout.cpp @@ -0,0 +1,2 @@ + +#include "mfvideoout.h" diff --git a/mfvideoout.h b/mfvideoout.h new file mode 100644 index 0000000..e12855e --- /dev/null +++ b/mfvideoout.h @@ -0,0 +1,19 @@ + 
+#ifndef MFVIDEOOUT_H
+#define MFVIDEOOUT_H
+
+#include "base.h"
+
+class MfVideoOut : public Base_Video_Out
+{
+public:
+	MfVideoOut() : Base_Video_Out() {};
+	virtual ~MfVideoOut() {};
+
+	void SendFrame(const char *imgIn, unsigned imgLen, const char *pxFmt, int width, int height) {};
+	void Stop() {};
+	int WaitForStop() {return 1;};
+};
+
+#endif //MFVIDEOOUT_H
+
diff --git a/setup.py b/setup.py
index 060e247..eaa1b46 100755
--- a/setup.py
+++ b/setup.py
@@ -11,6 +11,17 @@
 # See README for license
 
 from distutils.core import Extension, setup
+import os
+
+if os.name == "nt":
+	videolive = Extension("videolive", ["pixfmt.cpp", "libvideolive.cpp", "videoout.cpp", "videoin.cpp", "mfvideoin.cpp", "mfvideoout.cpp"],
+		library_dirs=['C:\Dev\Lib\libjpeg-turbo-gcc\lib'],
+		include_dirs=['C:\Dev\Lib\libjpeg-turbo-gcc\include'],
+		libraries = ["pthread", "jpeg"])
+else:
+	videolive = Extension("videolive", ["v4l2capture.cpp", "v4l2out.cpp", "pixfmt.cpp", "libvideolive.cpp", "videoout.cpp", "videoin.cpp"],
+		libraries = ["v4l2", "pthread", "jpeg"])
+
 setup(
 	name = "videolive",
 	version = "1.0",
@@ -23,7 +34,6 @@
 	classifiers = [
 		"License :: GPL",
 		"Programming Language :: C++"],
-	ext_modules = [
-		Extension("videolive", ["v4l2capture.cpp", "v4l2out.cpp", "pixfmt.cpp", "libvideolive.cpp", "videoout.cpp", "videoin.cpp"],
-			libraries = ["v4l2", "pthread", "jpeg"])])
+	ext_modules = [videolive]
+	)
 
From 1c54c8607687ac9de1671ca5af532974914949f8 Mon Sep 17 00:00:00 2001
From: TimSC
Date: Fri, 15 Nov 2013 17:58:21 +0000
Subject: [PATCH 087/256] Compiles empty classes on windows

---
 base.h | 6 ++++++
 mfvideoin.cpp | 14 ++++++++++++++
 mfvideoin.h | 7 ++++++-
 mfvideoout.cpp | 14 ++++++++++++++
 mfvideoout.h | 11 ++++++++++-
 setup.py | 2 ++
 videoin.cpp | 17 +++++++++++++++++
 videoout.cpp | 22 +++++++++++++++++++---
 8 files changed, 88 insertions(+), 5 deletions(-)

diff --git a/base.h b/base.h
index 31e5a11..205f8c5 100644
--- a/base.h
+++ b/base.h
@@ -58,6 +58,8 @@ class Base_Video_In
 	virtual void StopDevice() {};
 	virtual void CloseDevice() {};
 	virtual int GetFrame(unsigned char **buffOut, class FrameMetaData *metaOut) {return 0;};
+
+	void Run() {};
 };
 
 // **********************************************************************
@@ -71,6 +73,10 @@ class Base_Video_Out
 	virtual void SendFrame(const char *imgIn, unsigned imgLen, const char *pxFmt, int width, int height) {};
 	virtual void Stop() {};
 	virtual int WaitForStop() {return 1;};
+	virtual void SetOutputSize(int width, int height) {};
+	virtual void SetOutputPxFmt(const char *fmt) {};
+
+	void Run() {};
 };
 
 #endif //BASE_H
diff --git a/mfvideoin.cpp b/mfvideoin.cpp
index 1c950f0..9d73741 100644
--- a/mfvideoin.cpp
+++ b/mfvideoin.cpp
@@ -1,2 +1,16 @@
 
 #include "mfvideoin.h"
+
+void *MfVideoIn_Worker_thread(void *arg)
+{
+	class MfVideoIn *argobj = (class MfVideoIn*) arg;
+	argobj->Run();
+
+	return NULL;
+}
+
+std::vector<std::string> List_in_devices()
+{
+	std::vector<std::string> out;
+	return out;
+}
\ No newline at end of file
diff --git a/mfvideoin.h b/mfvideoin.h
index ba7f3c4..1292484 100644
--- a/mfvideoin.h
+++ b/mfvideoin.h
@@ -2,12 +2,14 @@
 #ifndef MFVIDEOIN_H
 #define MFVIDEOIN_H
 
+#include
+#include
 #include "base.h"
 
 class MfVideoIn : public Base_Video_In
 {
 public:
-	MfVideoIn() : Base_Video_In() {};
+	MfVideoIn(const char *devName) : Base_Video_In() {};
 	virtual ~MfVideoIn() {};
 
 	virtual void Stop() {};
@@ -20,4 +22,7 @@ class MfVideoIn : public Base_Video_In
 	virtual int GetFrame(unsigned char **buffOut, class FrameMetaData *metaOut) {return 0;};
 };
 
+void *MfVideoIn_Worker_thread(void *arg);
+std::vector<std::string> List_in_devices();
+
 #endif //MFVIDEOIN_H
diff --git a/mfvideoout.cpp b/mfvideoout.cpp
index 7d471c2..aff4c27 100644
--- a/mfvideoout.cpp
+++ b/mfvideoout.cpp
@@ -1,2 +1,16 @@
 
 #include "mfvideoout.h"
+
+void *MfVideoOut_Worker_thread(void *arg)
+{
+	class MfVideoOut *argobj = (class MfVideoOut*) arg;
+	argobj->Run();
+
+	return NULL;
+}
+
+std::vector<std::string> List_out_devices()
+{
+	std::vector<std::string> out;
+	return out;
+}
diff --git a/mfvideoout.h b/mfvideoout.h
index e12855e..d430eef 100644
--- a/mfvideoout.h
+++ b/mfvideoout.h
@@ -2,18 +2,27 @@
 #ifndef MFVIDEOOUT_H
 #define MFVIDEOOUT_H
 
+#include
+#include
 #include "base.h"
 
 class MfVideoOut : public Base_Video_Out
 {
 public:
-	MfVideoOut() : Base_Video_Out() {};
+	MfVideoOut(const char *devName) : Base_Video_Out() {};
 	virtual ~MfVideoOut() {};
 
 	void SendFrame(const char *imgIn, unsigned imgLen, const char *pxFmt, int width, int height) {};
 	void Stop() {};
 	int WaitForStop() {return 1;};
+
+	virtual void SetOutputSize(int width, int height) {};
+	virtual void SetOutputPxFmt(const char *fmt) {};
 };
 
+void *MfVideoOut_Worker_thread(void *arg);
+
+std::vector<std::string> List_out_devices();
+
 #endif //MFVIDEOOUT_H
 
diff --git a/setup.py b/setup.py
index eaa1b46..6272bd0 100755
--- a/setup.py
+++ b/setup.py
@@ -15,11 +15,13 @@
 
 if os.name == "nt":
 	videolive = Extension("videolive", ["pixfmt.cpp", "libvideolive.cpp", "videoout.cpp", "videoin.cpp", "mfvideoin.cpp", "mfvideoout.cpp"],
+		define_macros=[('_'+os.name.upper(), None)],
 		library_dirs=['C:\Dev\Lib\libjpeg-turbo-gcc\lib'],
 		include_dirs=['C:\Dev\Lib\libjpeg-turbo-gcc\include'],
 		libraries = ["pthread", "jpeg"])
 else:
 	videolive = Extension("videolive", ["v4l2capture.cpp", "v4l2out.cpp", "pixfmt.cpp", "libvideolive.cpp", "videoout.cpp", "videoin.cpp"],
+		define_macros=[('_'+os.name.upper(), None)],
 		libraries = ["v4l2", "pthread", "jpeg"])
 
 setup(
diff --git a/videoin.cpp b/videoin.cpp
index 867baaa..29bbb88 100644
--- a/videoin.cpp
+++ b/videoin.cpp
@@ -1,6 +1,11 @@
 
 #include "videoin.h"
+#ifdef _NT
+#include "mfvideoin.h"
+#endif
+#ifdef _POSIX
 #include "v4l2capture.h"
+#endif
 
 void Device_manager_dealloc(Device_manager *self)
 {
@@ -44,9 +49,21 @@ PyObject *Device_manager_open(Device_manager *self, PyObject *args)
 	}
 
 	pthread_t thread;
+	#ifdef _POSIX
 	Video_in_Manager *threadArgs = new Video_in_Manager(devarg);
+	#endif
+	#ifdef _NT
+	MfVideoIn *threadArgs = new MfVideoIn(devarg);
+	#endif
+
 	(*self->threadArgStore)[devarg] = threadArgs;
+
+	#ifdef _POSIX
 	pthread_create(&thread, NULL, Video_in_Worker_thread, threadArgs);
+	#endif
+	#ifdef _NT
+	pthread_create(&thread, NULL, MfVideoIn_Worker_thread, threadArgs);
+	#endif
 
 	threadArgs->OpenDevice();
 
diff --git a/videoout.cpp b/videoout.cpp
index 7d06159..a707a27 100644
--- a/videoout.cpp
+++ b/videoout.cpp
@@ -1,6 +1,12 @@
+#include
 
 #include "videoout.h"
+#ifdef _NT
+#include "mfvideoout.h"
+#endif
+#if _POSIX
 #include "v4l2out.h"
+#endif
 
 int Video_out_manager_init(Video_out_manager *self, PyObject *args,
 		PyObject *kwargs)
@@ -40,13 +46,23 @@ PyObject *Video_out_manager_open(Video_out_manager *self, PyObject *args)
 
 	//Create worker thread
 	pthread_t thread;
+	#ifdef _POSIX
 	Video_out *threadArgs = new Video_out(devarg);
+	#endif
+	#ifdef _NT
+	MfVideoOut *threadArgs = new MfVideoOut(devarg);
+	#endif
+
 	(*self->threads)[devarg] = threadArgs;
-	threadArgs->outputWidth = widthIn;
-	threadArgs->outputHeight = heightIn;
-	threadArgs->outputPxFmt = pxFmtIn;
+	threadArgs->SetOutputSize(widthIn, heightIn);
+	threadArgs->SetOutputPxFmt(pxFmtIn);
 
+ #ifdef _POSIX pthread_create(&thread, NULL, Video_out_manager_Worker_thread, threadArgs); + #endif + #ifdef _NT + pthread_create(&thread, NULL, MfVideoOut_Worker_thread, threadArgs); + #endif Py_RETURN_NONE; } From 4551650ed84b6322814667a8168745c33cb092c0 Mon Sep 17 00:00:00 2001 From: TimSC Date: Fri, 15 Nov 2013 18:19:49 +0000 Subject: [PATCH 088/256] Adapt to use visual studio --- setup.py | 8 +++++--- videoin.cpp | 1 + videoout.cpp | 1 + 3 files changed, 7 insertions(+), 3 deletions(-) diff --git a/setup.py b/setup.py index 6272bd0..73bd8d9 100755 --- a/setup.py +++ b/setup.py @@ -10,15 +10,17 @@ # 2013, Tim Sheerman-Chase # See README for license +# Visual Studio 2010 trick: SET VS90COMNTOOLS=%VS100COMNTOOLS% + from distutils.core import Extension, setup import os if os.name == "nt": videolive = Extension("videolive", ["pixfmt.cpp", "libvideolive.cpp", "videoout.cpp", "videoin.cpp", "mfvideoin.cpp", "mfvideoout.cpp"], define_macros=[('_'+os.name.upper(), None)], - library_dirs=['C:\Dev\Lib\libjpeg-turbo-gcc\lib'], - include_dirs=['C:\Dev\Lib\libjpeg-turbo-gcc\include'], - libraries = ["pthread", "jpeg"]) + library_dirs=['C:\Dev\Lib\libjpeg-turbo-win\lib', "C:\Dev\Lib\pthreads\pthreads.2"], + include_dirs=['C:\Dev\Lib\libjpeg-turbo-win\include', "C:\Dev\Lib\pthreads\pthreads.2"], + libraries = ["pthreadVC2", "jpeg"]) else: videolive = Extension("videolive", ["v4l2capture.cpp", "v4l2out.cpp", "pixfmt.cpp", "libvideolive.cpp", "videoout.cpp", "videoin.cpp"], define_macros=[('_'+os.name.upper(), None)], diff --git a/videoin.cpp b/videoin.cpp index 29bbb88..1027e8f 100644 --- a/videoin.cpp +++ b/videoin.cpp @@ -1,5 +1,6 @@ #include "videoin.h" +#include #ifdef _NT #include "mfvideoin.h" #endif diff --git a/videoout.cpp b/videoout.cpp index a707a27..8202972 100644 --- a/videoout.cpp +++ b/videoout.cpp @@ -1,5 +1,6 @@ #include +#include #include "videoout.h" #ifdef _NT #include "mfvideoout.h" From a6cdf84795da47efe1ffcfdf2b00207ffc5ddd01 Mon Sep 17 00:00:00 2001 From: TimSC Date: Fri, 15 Nov 2013 18:33:11 +0000 Subject: [PATCH 089/256] Add manifest option --- setup.py | 1 + 1 file changed, 1 insertion(+) diff --git a/setup.py b/setup.py index 73bd8d9..419fd71 100755 --- a/setup.py +++ b/setup.py @@ -20,6 +20,7 @@ define_macros=[('_'+os.name.upper(), None)], library_dirs=['C:\Dev\Lib\libjpeg-turbo-win\lib', "C:\Dev\Lib\pthreads\pthreads.2"], include_dirs=['C:\Dev\Lib\libjpeg-turbo-win\include', "C:\Dev\Lib\pthreads\pthreads.2"], + extra_link_args=["/MANIFEST"], libraries = ["pthreadVC2", "jpeg"]) else: videolive = Extension("videolive", ["v4l2capture.cpp", "v4l2out.cpp", "pixfmt.cpp", "libvideolive.cpp", "videoout.cpp", "videoin.cpp"], From bea004121cd602f7b16cb661b84b9d0580f6c545 Mon Sep 17 00:00:00 2001 From: TimSC Date: Fri, 15 Nov 2013 18:37:57 +0000 Subject: [PATCH 090/256] Update docs --- README | 26 ++++++++++++++++++++------ setup.py | 2 -- 2 files changed, 20 insertions(+), 8 deletions(-) diff --git a/README b/README index 3a1c763..7efe3be 100644 --- a/README +++ b/README @@ -19,15 +19,29 @@ original python-v4l2capture: http://fredrik.jemla.eu/v4l2capture libv4l: http://freshmeat.net/projects/libv4l -Installation -============ +Installation on Linux +===================== libvideolive on linux requires libv4l by default. You can do so by erasing ', libraries = ["v4l2"]' -in setup.py and erasing '#define USE_LIBV4L' in v4l2capture.c. +in setup.py and erasing '#define USE_LIBV4L' in v4l2capture.cpp. + +libvideolive uses distutils to compile. 
For Linux: + +./setup.py build +sudo ./setup.py install + +Installation on Windows +===================== + +Modify setup.py to specify locations of libjpeg and pthread external libraries. + +With Visual Studio 2010: + +SET VS90COMNTOOLS=%VS100COMNTOOLS% +python setup.py build -c msvc +python setup.py install -libvideolive uses distutils. -To build: ./setup.py build -To build and install: ./setup.py install +Remember to put the libjpeg and pthread dlls somewhere appropriate. Example ======= diff --git a/setup.py b/setup.py index 419fd71..5dbe2e2 100755 --- a/setup.py +++ b/setup.py @@ -10,8 +10,6 @@ # 2013, Tim Sheerman-Chase # See README for license -# Visual Studio 2010 trick: SET VS90COMNTOOLS=%VS100COMNTOOLS% - from distutils.core import Extension, setup import os From dae34e0d63e30222733e8ce91591c84741df599a Mon Sep 17 00:00:00 2001 From: Tim Sheerman-Chase Date: Fri, 15 Nov 2013 20:25:01 +0000 Subject: [PATCH 091/256] Update linux classes to use base method --- v4l2out.cpp | 15 +++++++++++++++ v4l2out.h | 3 +++ 2 files changed, 18 insertions(+) diff --git a/v4l2out.cpp b/v4l2out.cpp index ca8a592..9da3e93 100644 --- a/v4l2out.cpp +++ b/v4l2out.cpp @@ -333,6 +333,21 @@ int Video_out::WaitForStop() } } +void Video_out::SetOutputSize(int width, int height) +{ + pthread_mutex_lock(&this->lock); + this->outputWidth = width; + this->outputHeight = height; + pthread_mutex_unlock(&this->lock); +} + +void Video_out::SetOutputPxFmt(const char *fmt) +{ + pthread_mutex_lock(&this->lock); + this->outputPxFmt = fmt; + pthread_mutex_unlock(&this->lock); +} + void *Video_out_manager_Worker_thread(void *arg) { class Video_out *argobj = (class Video_out*) arg; diff --git a/v4l2out.h b/v4l2out.h index 841daa8..7003e44 100644 --- a/v4l2out.h +++ b/v4l2out.h @@ -35,6 +35,9 @@ class Video_out : public Base_Video_Out void SendFrame(const char *imgIn, unsigned imgLen, const char *pxFmt, int width, int height); void Stop(); int WaitForStop(); + + void SetOutputSize(int width, int height); + void SetOutputPxFmt(const char *fmt); }; void *Video_out_manager_Worker_thread(void *arg); From f130977cf1e3b96f323612bdcabff435ef249537 Mon Sep 17 00:00:00 2001 From: TimSC Date: Mon, 18 Nov 2013 12:08:32 +0000 Subject: [PATCH 092/256] Move wmf implementation into separate source file --- mfvideoin.cpp | 52 +++++++++++++++++++++++++++++++++++++++++++++++++++ mfvideoin.h | 20 ++++++++++---------- 2 files changed, 62 insertions(+), 10 deletions(-) diff --git a/mfvideoin.cpp b/mfvideoin.cpp index 9d73741..5ddb2f9 100644 --- a/mfvideoin.cpp +++ b/mfvideoin.cpp @@ -1,6 +1,58 @@ #include "mfvideoin.h" +MfVideoIn::MfVideoIn(const char *devName) : Base_Video_In() +{ + +} + +MfVideoIn::~MfVideoIn() +{ + +} + +void MfVideoIn::Stop() +{ + +} + +void MfVideoIn::WaitForStop() +{ + +} + +void MfVideoIn::OpenDevice() +{ + +} + +void MfVideoIn::SetFormat(const char *fmt, int width, int height) +{ + +} + +void MfVideoIn::StartDevice(int buffer_count) +{ + +} + +void MfVideoIn::StopDevice() +{ + +} + +void MfVideoIn::CloseDevice() +{ + +} + +int MfVideoIn::GetFrame(unsigned char **buffOut, class FrameMetaData *metaOut) +{ + return 0; +} + +//************************************************************ + void *MfVideoIn_Worker_thread(void *arg) { class MfVideoIn *argobj = (class MfVideoIn*) arg; diff --git a/mfvideoin.h b/mfvideoin.h index 1292484..312f50f 100644 --- a/mfvideoin.h +++ b/mfvideoin.h @@ -9,17 +9,17 @@ class MfVideoIn : public Base_Video_In { public: - MfVideoIn(const char *devName) : Base_Video_In() {}; - virtual 
~MfVideoIn() {}; + MfVideoIn(const char *devName); + virtual ~MfVideoIn(); - virtual void Stop() {}; - virtual void WaitForStop() {}; - virtual void OpenDevice() {}; - virtual void SetFormat(const char *fmt, int width, int height) {}; - virtual void StartDevice(int buffer_count) {}; - virtual void StopDevice() {}; - virtual void CloseDevice() {}; - virtual int GetFrame(unsigned char **buffOut, class FrameMetaData *metaOut) {return 0;}; + virtual void Stop(); + virtual void WaitForStop(); + virtual void OpenDevice(); + virtual void SetFormat(const char *fmt, int width, int height); + virtual void StartDevice(int buffer_count); + virtual void StopDevice(); + virtual void CloseDevice(); + virtual int GetFrame(unsigned char **buffOut, class FrameMetaData *metaOut); }; void *MfVideoIn_Worker_thread(void *arg); From 863f71a1c855580085428ab1c01fa76de5cf5c6e Mon Sep 17 00:00:00 2001 From: TimSC Date: Mon, 18 Nov 2013 14:14:09 +0000 Subject: [PATCH 093/256] Importing some source code and link against wmf --- mfvideoin.cpp | 466 ++++++++++++++++++++++++++++++++++++++++++++++++++ setup.py | 3 +- 2 files changed, 468 insertions(+), 1 deletion(-) diff --git a/mfvideoin.cpp b/mfvideoin.cpp index 5ddb2f9..a2da421 100644 --- a/mfvideoin.cpp +++ b/mfvideoin.cpp @@ -1,6 +1,472 @@ +#include +#include +#include +#include +#include +using namespace std; + +#include +#include +#include +#include +#include + #include "mfvideoin.h" +#define MAX_DEVICE_ID_LEN 100 + +template void SafeRelease(T **ppT) +{ + if (*ppT) + { + (*ppT)->Release(); + *ppT = NULL; + } +} + +void PrintGuid(GUID guid) +{ + LPOLESTR lplpsz; + StringFromCLSID(guid, &lplpsz); + wcout << lplpsz << endl; + CoTaskMemFree(lplpsz); +} + +#ifndef IF_EQUAL_RETURN +#define IF_EQUAL_RETURN(param, val) if(val == param) return L#val +#endif + +LPCWSTR GetGUIDNameConst(const GUID& guid) +{ + //http://msdn.microsoft.com/en-us/library/windows/desktop/ee663602%28v=vs.85%29.aspx + IF_EQUAL_RETURN(guid, MF_MT_MAJOR_TYPE); + IF_EQUAL_RETURN(guid, MF_MT_MAJOR_TYPE); + IF_EQUAL_RETURN(guid, MF_MT_SUBTYPE); + IF_EQUAL_RETURN(guid, MF_MT_ALL_SAMPLES_INDEPENDENT); + IF_EQUAL_RETURN(guid, MF_MT_FIXED_SIZE_SAMPLES); + IF_EQUAL_RETURN(guid, MF_MT_COMPRESSED); + IF_EQUAL_RETURN(guid, MF_MT_SAMPLE_SIZE); + IF_EQUAL_RETURN(guid, MF_MT_WRAPPED_TYPE); + IF_EQUAL_RETURN(guid, MF_MT_AUDIO_NUM_CHANNELS); + IF_EQUAL_RETURN(guid, MF_MT_AUDIO_SAMPLES_PER_SECOND); + IF_EQUAL_RETURN(guid, MF_MT_AUDIO_FLOAT_SAMPLES_PER_SECOND); + IF_EQUAL_RETURN(guid, MF_MT_AUDIO_AVG_BYTES_PER_SECOND); + IF_EQUAL_RETURN(guid, MF_MT_AUDIO_BLOCK_ALIGNMENT); + IF_EQUAL_RETURN(guid, MF_MT_AUDIO_BITS_PER_SAMPLE); + IF_EQUAL_RETURN(guid, MF_MT_AUDIO_VALID_BITS_PER_SAMPLE); + IF_EQUAL_RETURN(guid, MF_MT_AUDIO_SAMPLES_PER_BLOCK); + IF_EQUAL_RETURN(guid, MF_MT_AUDIO_CHANNEL_MASK); + IF_EQUAL_RETURN(guid, MF_MT_AUDIO_FOLDDOWN_MATRIX); + IF_EQUAL_RETURN(guid, MF_MT_AUDIO_WMADRC_PEAKREF); + IF_EQUAL_RETURN(guid, MF_MT_AUDIO_WMADRC_PEAKTARGET); + IF_EQUAL_RETURN(guid, MF_MT_AUDIO_WMADRC_AVGREF); + IF_EQUAL_RETURN(guid, MF_MT_AUDIO_WMADRC_AVGTARGET); + IF_EQUAL_RETURN(guid, MF_MT_AUDIO_PREFER_WAVEFORMATEX); + IF_EQUAL_RETURN(guid, MF_MT_AAC_PAYLOAD_TYPE); + IF_EQUAL_RETURN(guid, MF_MT_AAC_AUDIO_PROFILE_LEVEL_INDICATION); + IF_EQUAL_RETURN(guid, MF_MT_FRAME_SIZE); + IF_EQUAL_RETURN(guid, MF_MT_FRAME_RATE); + IF_EQUAL_RETURN(guid, MF_MT_FRAME_RATE_RANGE_MAX); + IF_EQUAL_RETURN(guid, MF_MT_FRAME_RATE_RANGE_MIN); + IF_EQUAL_RETURN(guid, MF_MT_PIXEL_ASPECT_RATIO); + IF_EQUAL_RETURN(guid, MF_MT_DRM_FLAGS); + 
IF_EQUAL_RETURN(guid, MF_MT_PAD_CONTROL_FLAGS); + IF_EQUAL_RETURN(guid, MF_MT_SOURCE_CONTENT_HINT); + IF_EQUAL_RETURN(guid, MF_MT_VIDEO_CHROMA_SITING); + IF_EQUAL_RETURN(guid, MF_MT_INTERLACE_MODE); + IF_EQUAL_RETURN(guid, MF_MT_TRANSFER_FUNCTION); + IF_EQUAL_RETURN(guid, MF_MT_VIDEO_PRIMARIES); + IF_EQUAL_RETURN(guid, MF_MT_CUSTOM_VIDEO_PRIMARIES); + IF_EQUAL_RETURN(guid, MF_MT_YUV_MATRIX); + IF_EQUAL_RETURN(guid, MF_MT_VIDEO_LIGHTING); + IF_EQUAL_RETURN(guid, MF_MT_VIDEO_NOMINAL_RANGE); + IF_EQUAL_RETURN(guid, MF_MT_GEOMETRIC_APERTURE); + IF_EQUAL_RETURN(guid, MF_MT_MINIMUM_DISPLAY_APERTURE); + IF_EQUAL_RETURN(guid, MF_MT_PAN_SCAN_APERTURE); + IF_EQUAL_RETURN(guid, MF_MT_PAN_SCAN_ENABLED); + IF_EQUAL_RETURN(guid, MF_MT_AVG_BITRATE); + IF_EQUAL_RETURN(guid, MF_MT_AVG_BIT_ERROR_RATE); + IF_EQUAL_RETURN(guid, MF_MT_MAX_KEYFRAME_SPACING); + IF_EQUAL_RETURN(guid, MF_MT_DEFAULT_STRIDE); + IF_EQUAL_RETURN(guid, MF_MT_PALETTE); + IF_EQUAL_RETURN(guid, MF_MT_USER_DATA); + IF_EQUAL_RETURN(guid, MF_MT_AM_FORMAT_TYPE); + IF_EQUAL_RETURN(guid, MF_MT_MPEG_START_TIME_CODE); + IF_EQUAL_RETURN(guid, MF_MT_MPEG2_PROFILE); + IF_EQUAL_RETURN(guid, MF_MT_MPEG2_LEVEL); + IF_EQUAL_RETURN(guid, MF_MT_MPEG2_FLAGS); + IF_EQUAL_RETURN(guid, MF_MT_MPEG_SEQUENCE_HEADER); + IF_EQUAL_RETURN(guid, MF_MT_DV_AAUX_SRC_PACK_0); + IF_EQUAL_RETURN(guid, MF_MT_DV_AAUX_CTRL_PACK_0); + IF_EQUAL_RETURN(guid, MF_MT_DV_AAUX_SRC_PACK_1); + IF_EQUAL_RETURN(guid, MF_MT_DV_AAUX_CTRL_PACK_1); + IF_EQUAL_RETURN(guid, MF_MT_DV_VAUX_SRC_PACK); + IF_EQUAL_RETURN(guid, MF_MT_DV_VAUX_CTRL_PACK); + IF_EQUAL_RETURN(guid, MF_MT_ARBITRARY_HEADER); + IF_EQUAL_RETURN(guid, MF_MT_ARBITRARY_FORMAT); + IF_EQUAL_RETURN(guid, MF_MT_IMAGE_LOSS_TOLERANT); + IF_EQUAL_RETURN(guid, MF_MT_MPEG4_SAMPLE_DESCRIPTION); + IF_EQUAL_RETURN(guid, MF_MT_MPEG4_CURRENT_SAMPLE_ENTRY); + IF_EQUAL_RETURN(guid, MF_MT_ORIGINAL_4CC); + IF_EQUAL_RETURN(guid, MF_MT_ORIGINAL_WAVE_FORMAT_TAG); + + //IF_EQUAL_RETURN(guid, FORMAT_VideoInfo); //Dshow dependent + //IF_EQUAL_RETURN(guid, FORMAT_VideoInfo2); + + // Media types + + IF_EQUAL_RETURN(guid, MFMediaType_Audio); + IF_EQUAL_RETURN(guid, MFMediaType_Video); + IF_EQUAL_RETURN(guid, MFMediaType_Protected); + IF_EQUAL_RETURN(guid, MFMediaType_SAMI); + IF_EQUAL_RETURN(guid, MFMediaType_Script); + IF_EQUAL_RETURN(guid, MFMediaType_Image); + IF_EQUAL_RETURN(guid, MFMediaType_HTML); + IF_EQUAL_RETURN(guid, MFMediaType_Binary); + IF_EQUAL_RETURN(guid, MFMediaType_FileTransfer); + + IF_EQUAL_RETURN(guid, MFVideoFormat_AI44); // FCC('AI44') + IF_EQUAL_RETURN(guid, MFVideoFormat_ARGB32); // D3DFMT_A8R8G8B8 + IF_EQUAL_RETURN(guid, MFVideoFormat_AYUV); // FCC('AYUV') + IF_EQUAL_RETURN(guid, MFVideoFormat_DV25); // FCC('dv25') + IF_EQUAL_RETURN(guid, MFVideoFormat_DV50); // FCC('dv50') + IF_EQUAL_RETURN(guid, MFVideoFormat_DVH1); // FCC('dvh1') + IF_EQUAL_RETURN(guid, MFVideoFormat_DVSD); // FCC('dvsd') + IF_EQUAL_RETURN(guid, MFVideoFormat_DVSL); // FCC('dvsl') + IF_EQUAL_RETURN(guid, MFVideoFormat_H264); // FCC('H264') + IF_EQUAL_RETURN(guid, MFVideoFormat_I420); // FCC('I420') + IF_EQUAL_RETURN(guid, MFVideoFormat_IYUV); // FCC('IYUV') + IF_EQUAL_RETURN(guid, MFVideoFormat_M4S2); // FCC('M4S2') + IF_EQUAL_RETURN(guid, MFVideoFormat_MJPG); + IF_EQUAL_RETURN(guid, MFVideoFormat_MP43); // FCC('MP43') + IF_EQUAL_RETURN(guid, MFVideoFormat_MP4S); // FCC('MP4S') + IF_EQUAL_RETURN(guid, MFVideoFormat_MP4V); // FCC('MP4V') + IF_EQUAL_RETURN(guid, MFVideoFormat_MPG1); // FCC('MPG1') + IF_EQUAL_RETURN(guid, MFVideoFormat_MSS1); // FCC('MSS1') + 
IF_EQUAL_RETURN(guid, MFVideoFormat_MSS2); // FCC('MSS2') + IF_EQUAL_RETURN(guid, MFVideoFormat_NV11); // FCC('NV11') + IF_EQUAL_RETURN(guid, MFVideoFormat_NV12); // FCC('NV12') + IF_EQUAL_RETURN(guid, MFVideoFormat_P010); // FCC('P010') + IF_EQUAL_RETURN(guid, MFVideoFormat_P016); // FCC('P016') + IF_EQUAL_RETURN(guid, MFVideoFormat_P210); // FCC('P210') + IF_EQUAL_RETURN(guid, MFVideoFormat_P216); // FCC('P216') + IF_EQUAL_RETURN(guid, MFVideoFormat_RGB24); // D3DFMT_R8G8B8 + IF_EQUAL_RETURN(guid, MFVideoFormat_RGB32); // D3DFMT_X8R8G8B8 + IF_EQUAL_RETURN(guid, MFVideoFormat_RGB555); // D3DFMT_X1R5G5B5 + IF_EQUAL_RETURN(guid, MFVideoFormat_RGB565); // D3DFMT_R5G6B5 + IF_EQUAL_RETURN(guid, MFVideoFormat_RGB8); + IF_EQUAL_RETURN(guid, MFVideoFormat_UYVY); // FCC('UYVY') + IF_EQUAL_RETURN(guid, MFVideoFormat_v210); // FCC('v210') + IF_EQUAL_RETURN(guid, MFVideoFormat_v410); // FCC('v410') + IF_EQUAL_RETURN(guid, MFVideoFormat_WMV1); // FCC('WMV1') + IF_EQUAL_RETURN(guid, MFVideoFormat_WMV2); // FCC('WMV2') + IF_EQUAL_RETURN(guid, MFVideoFormat_WMV3); // FCC('WMV3') + IF_EQUAL_RETURN(guid, MFVideoFormat_WVC1); // FCC('WVC1') + IF_EQUAL_RETURN(guid, MFVideoFormat_Y210); // FCC('Y210') + IF_EQUAL_RETURN(guid, MFVideoFormat_Y216); // FCC('Y216') + IF_EQUAL_RETURN(guid, MFVideoFormat_Y410); // FCC('Y410') + IF_EQUAL_RETURN(guid, MFVideoFormat_Y416); // FCC('Y416') + IF_EQUAL_RETURN(guid, MFVideoFormat_Y41P); + IF_EQUAL_RETURN(guid, MFVideoFormat_Y41T); + IF_EQUAL_RETURN(guid, MFVideoFormat_YUY2); // FCC('YUY2') + IF_EQUAL_RETURN(guid, MFVideoFormat_YV12); // FCC('YV12') + IF_EQUAL_RETURN(guid, MFVideoFormat_YVYU); + + IF_EQUAL_RETURN(guid, MFAudioFormat_PCM); // WAVE_FORMAT_PCM + IF_EQUAL_RETURN(guid, MFAudioFormat_Float); // WAVE_FORMAT_IEEE_FLOAT + IF_EQUAL_RETURN(guid, MFAudioFormat_DTS); // WAVE_FORMAT_DTS + IF_EQUAL_RETURN(guid, MFAudioFormat_Dolby_AC3_SPDIF); // WAVE_FORMAT_DOLBY_AC3_SPDIF + IF_EQUAL_RETURN(guid, MFAudioFormat_DRM); // WAVE_FORMAT_DRM + IF_EQUAL_RETURN(guid, MFAudioFormat_WMAudioV8); // WAVE_FORMAT_WMAUDIO2 + IF_EQUAL_RETURN(guid, MFAudioFormat_WMAudioV9); // WAVE_FORMAT_WMAUDIO3 + IF_EQUAL_RETURN(guid, MFAudioFormat_WMAudio_Lossless); // WAVE_FORMAT_WMAUDIO_LOSSLESS + IF_EQUAL_RETURN(guid, MFAudioFormat_WMASPDIF); // WAVE_FORMAT_WMASPDIF + IF_EQUAL_RETURN(guid, MFAudioFormat_MSP1); // WAVE_FORMAT_WMAVOICE9 + IF_EQUAL_RETURN(guid, MFAudioFormat_MP3); // WAVE_FORMAT_MPEGLAYER3 + IF_EQUAL_RETURN(guid, MFAudioFormat_MPEG); // WAVE_FORMAT_MPEG + IF_EQUAL_RETURN(guid, MFAudioFormat_AAC); // WAVE_FORMAT_MPEG_HEAAC + IF_EQUAL_RETURN(guid, MFAudioFormat_ADTS); // WAVE_FORMAT_MPEG_ADTS_AAC + + return NULL; +} + +HRESULT GetDefaultStride(IMFMediaType *pType, LONG *plStride) +{ + LONG lStride = 0; + + // Try to get the default stride from the media type. + HRESULT hr = pType->GetUINT32(MF_MT_DEFAULT_STRIDE, (UINT32*)&lStride); + + if (FAILED(hr)) + { + // Attribute not set. Try to calculate the default stride. + + GUID subtype = GUID_NULL; + + UINT32 width = 0; + UINT32 height = 0; + // Get the subtype and the image size. + hr = pType->GetGUID(MF_MT_SUBTYPE, &subtype); + if (FAILED(hr)) + { + goto done; + } + hr = MFGetAttributeSize(pType, MF_MT_FRAME_SIZE, &width, &height); + if (FAILED(hr)) + { + goto done; + } + hr = MFGetStrideForBitmapInfoHeader(subtype.Data1, width, &lStride); + if (FAILED(hr)) + { + goto done; + } + + // Set the attribute for later reference. 
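// (Comment added for clarity, not in the original source: caching the computed
// stride on the media type means a later call to GetDefaultStride() can take
// the fast GetUINT32(MF_MT_DEFAULT_STRIDE) path instead of recomputing the
// stride from the subtype and frame size.)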
+ (void)pType->SetUINT32(MF_MT_DEFAULT_STRIDE, UINT32(lStride)); + } + + if (SUCCEEDED(hr)) + { + *plStride = lStride; + } + +done: + return hr; +} + +DWORD SampleToStaticObj(IMFSample *pSample, char **buff) +{ + if(*buff!=NULL) + throw runtime_error("Buff ptr should be initially null"); + IMFMediaBuffer *ppBuffer = NULL; + HRESULT hr = pSample->ConvertToContiguousBuffer(&ppBuffer); + //cout << "ConvertToContiguousBuffer=" << SUCCEEDED(hr) << "\tstride="<< plStride << "\n"; + + IMF2DBuffer *m_p2DBuffer = NULL; + ppBuffer->QueryInterface(IID_IMF2DBuffer, (void**)&m_p2DBuffer); + //cout << "IMF2DBuffer=" << (m_p2DBuffer != NULL) << "\n"; + + DWORD pcbCurrentLength = 0; + BYTE *ppbBuffer = NULL; + DWORD pcbMaxLength = 0; + + if(SUCCEEDED(hr)) + { + + hr = ppBuffer->Lock(&ppbBuffer, &pcbMaxLength, &pcbCurrentLength); + //cout << "pcbMaxLength="<< pcbMaxLength << "\tpcbCurrentLength=" <Unlock(); + } + + if(ppBuffer) ppBuffer->Release(); + return pcbCurrentLength; +} + +/*void SetSampleMetaData(IMFSourceReader *pReader, DWORD streamIndex, PyObject *out) +{ + //Set meta data in output object + IMFMediaType *pCurrentType = NULL; + LONG plStride = 0; + GUID majorType=GUID_NULL, subType=GUID_NULL; + UINT32 width = 0; + UINT32 height = 0; + + HRESULT hr = pReader->GetCurrentMediaType(streamIndex, &pCurrentType); + if(!SUCCEEDED(hr)) cout << "Error 3\n"; + BOOL isComp = FALSE; + hr = pCurrentType->IsCompressedFormat(&isComp); + PyDict_SetItemStringAndDeleteVar(out, "isCompressed", PyBool_FromLong(isComp)); + hr = pCurrentType->GetGUID(MF_MT_MAJOR_TYPE, &majorType); + LPCWSTR typePtr = GetGUIDNameConst(majorType); + if(!SUCCEEDED(hr)) cout << "Error 4\n"; + hr = pCurrentType->GetGUID(MF_MT_SUBTYPE, &subType); + if(!SUCCEEDED(hr)) cout << "Error 5\n"; + int isVideo = (majorType==MFMediaType_Video); + if(isVideo) + { + GetDefaultStride(pCurrentType, &plStride); + hr = MFGetAttributeSize(pCurrentType, MF_MT_FRAME_SIZE, &width, &height); + if(!SUCCEEDED(hr)) cout << "Error 20\n"; + } + + LPCWSTR subTypePtr = GetGUIDNameConst(subType); + //if(subTypePtr!=0) wcout << "subtype\t" << subTypePtr << "\n"; + + PyDict_SetItemStringAndDeleteVar(out, "isCompressed", PyBool_FromLong(isComp)); + if(typePtr!=NULL) PyDict_SetItemStringAndDeleteVar(out, "type", PyUnicode_FromWideChar(typePtr, wcslen(typePtr))); + if(subTypePtr!=NULL) PyDict_SetItemStringAndDeleteVar(out, "subtype", PyUnicode_FromWideChar(subTypePtr, wcslen(subTypePtr))); + if(!isComp) PyDict_SetItemStringAndDeleteVar(out, "stride", PyInt_FromLong(plStride)); + PyDict_SetItemStringAndDeleteVar(out, "width", PyInt_FromLong(width)); + PyDict_SetItemStringAndDeleteVar(out, "height", PyInt_FromLong(height)); + +} +*/ + +class SourceReaderCB : public IMFSourceReaderCallback +{ + //http://msdn.microsoft.com/en-us/library/windows/desktop/gg583871%28v=vs.85%29.aspx +public: + LONG volatile m_nRefCount; + CRITICAL_SECTION lock; + int framePending; + unsigned int maxNumFrames; + + vector frameBuff; + vector frameLenBuff; + vector hrStatusBuff; + vector dwStreamIndexBuff; + vector dwStreamFlagsBuff; + vector llTimestampBuff; + + SourceReaderCB() + { + m_nRefCount = 0; + framePending = 0; + InitializeCriticalSection(&lock); + maxNumFrames = 10; + } + + virtual ~SourceReaderCB() + { + DeleteCriticalSection(&lock); + for(unsigned int i=0; iframeBuff.size(); i++) + delete [] this->frameBuff[i]; + } + + STDMETHODIMP QueryInterface(REFIID iid, void** ppv) + { + static const QITAB qit[] = + { + QITABENT(SourceReaderCB, IMFSourceReaderCallback), + { 0 }, + }; + return 
QISearch(this, qit, iid, ppv); + } + + STDMETHODIMP OnReadSample(HRESULT hrStatus, DWORD dwStreamIndex, + DWORD dwStreamFlags, LONGLONG llTimestamp, IMFSample *pSample) + { + //cout << "OnReadSample: " << llTimestamp << endl; + EnterCriticalSection(&lock); + + if (pSample && this->frameBuff.size() < this->maxNumFrames) + { + char *buff = NULL; + DWORD buffLen = SampleToStaticObj(pSample, &buff); + //cout << (long) buff << "," << buffLen << endl; + //if(buff!=NULL) delete [] buff; + + frameBuff.push_back(buff); + frameLenBuff.push_back(buffLen); + hrStatusBuff.push_back(hrStatus); + dwStreamIndexBuff.push_back(dwStreamIndex); + dwStreamFlagsBuff.push_back(dwStreamFlags); + llTimestampBuff.push_back(llTimestamp); + } + + this->framePending = 0; + LeaveCriticalSection(&lock); + return S_OK; + } + + STDMETHODIMP_(ULONG) AddRef() + { + return InterlockedIncrement(&m_nRefCount); + } + + STDMETHODIMP_(ULONG) Release() + { + ULONG uCount = InterlockedDecrement(&m_nRefCount); + if (uCount == 0) + { + //cout << "self destruct" << endl; + delete this; + } + return uCount; + } + + STDMETHODIMP OnEvent(DWORD, IMFMediaEvent *) + { + return S_OK; + } + + STDMETHODIMP OnFlush(DWORD) + { + return S_OK; + } + + void SetPending() + { + EnterCriticalSection(&lock); + this->framePending = 1; + LeaveCriticalSection(&lock); + } + + int GetPending() + { + EnterCriticalSection(&lock); + int pendingCopy = this->framePending; + LeaveCriticalSection(&lock); + return pendingCopy; + } + + void WaitForFrame() + { + while(1) + { + EnterCriticalSection(&lock); + int pendingCopy = this->framePending; + LeaveCriticalSection(&lock); + if (!pendingCopy) return; + Sleep(10); + } + } + + int GetFrame(HRESULT *hrStatus, DWORD *dwStreamIndex, + DWORD *dwStreamFlags, LONGLONG *llTimestamp, char **frame, DWORD *buffLen) + { + int ret = 0; + *hrStatus = S_OK; + *dwStreamIndex = 0; + *dwStreamFlags = 0; + *llTimestamp = 0; + *frame = NULL; + *buffLen = 0; + + EnterCriticalSection(&lock); + if(this->frameBuff.size()>0) + { + *frame = frameBuff[0]; + *buffLen = frameLenBuff[0]; + *hrStatus = hrStatusBuff[0]; + *dwStreamIndex = dwStreamIndexBuff[0]; + *dwStreamFlags = dwStreamFlagsBuff[0]; + *llTimestamp = llTimestampBuff[0]; + + this->frameBuff.erase(this->frameBuff.begin()); + this->frameLenBuff.erase(this->frameLenBuff.begin()); + this->hrStatusBuff.erase(this->hrStatusBuff.begin()); + this->dwStreamIndexBuff.erase(this->dwStreamIndexBuff.begin()); + this->dwStreamFlagsBuff.erase(this->dwStreamFlagsBuff.begin()); + this->llTimestampBuff.erase(this->llTimestampBuff.begin()); + ret = 1; + } + LeaveCriticalSection(&lock); + return ret; + } + +}; + + +//*************************************************************************** + MfVideoIn::MfVideoIn(const char *devName) : Base_Video_In() { diff --git a/setup.py b/setup.py index 5dbe2e2..76b48c9 100755 --- a/setup.py +++ b/setup.py @@ -19,7 +19,8 @@ library_dirs=['C:\Dev\Lib\libjpeg-turbo-win\lib', "C:\Dev\Lib\pthreads\pthreads.2"], include_dirs=['C:\Dev\Lib\libjpeg-turbo-win\include', "C:\Dev\Lib\pthreads\pthreads.2"], extra_link_args=["/MANIFEST"], - libraries = ["pthreadVC2", "jpeg"]) + libraries = ["pthreadVC2", "jpeg", "Mfplat", "Mf", "Mfreadwrite", "Ole32", "mfuuid", "Shlwapi"]) + else: videolive = Extension("videolive", ["v4l2capture.cpp", "v4l2out.cpp", "pixfmt.cpp", "libvideolive.cpp", "videoout.cpp", "videoin.cpp"], define_macros=[('_'+os.name.upper(), None)], From 38222eafcac90916b80feae34280cf567d8b439c Mon Sep 17 00:00:00 2001 From: TimSC Date: Mon, 18 Nov 2013 14:42:51 
+0000 Subject: [PATCH 094/256] Add test script --- mfvideoin.cpp | 49 ++++++++++++++++++++++++++++++++++++++++++++----- mfvideoin.h | 13 +++++++++++++ videoin.py | 14 ++++++++++++++ 3 files changed, 71 insertions(+), 5 deletions(-) create mode 100644 videoin.py diff --git a/mfvideoin.cpp b/mfvideoin.cpp index a2da421..a2a7b1a 100644 --- a/mfvideoin.cpp +++ b/mfvideoin.cpp @@ -7,8 +7,6 @@ using namespace std; #include -#include -#include #include #include @@ -467,14 +465,20 @@ class SourceReaderCB : public IMFSourceReaderCallback //*************************************************************************** -MfVideoIn::MfVideoIn(const char *devName) : Base_Video_In() +MfVideoIn::MfVideoIn(const char *devNameIn) : Base_Video_In() { - + this->initDone = 0; + this->asyncMode = 1; + this->devName = devNameIn; + this->reader = NULL; + this->source = NULL; + this->readerCallback = NULL; + this->InitWmf(); } MfVideoIn::~MfVideoIn() { - + this->DeinitWmf(); } void MfVideoIn::Stop() @@ -517,6 +521,41 @@ int MfVideoIn::GetFrame(unsigned char **buffOut, class FrameMetaData *metaOut) return 0; } +//*************************************************************** + +void MfVideoIn::InitWmf() +{ + if(this->initDone) + throw runtime_error("Media Foundation init already done"); + + HRESULT hr = MFStartup(MF_VERSION); + if(!SUCCEEDED(hr)) + throw std::runtime_error("Media foundation startup failed"); + + hr = CoInitializeEx(NULL, COINIT_MULTITHREADED); + if(!SUCCEEDED(hr)) + throw std::runtime_error("CoInitializeEx failed"); + + this->initDone = true; +} + +void MfVideoIn::DeinitWmf() +{ + if(!this->initDone) + throw runtime_error("Media Foundation init not done"); + + SafeRelease(&reader); + reader = NULL; + SafeRelease(&source); + source = NULL; + + MFShutdown(); + + CoUninitialize(); + + this->initDone = false; +} + //************************************************************ void *MfVideoIn_Worker_thread(void *arg) diff --git a/mfvideoin.h b/mfvideoin.h index 312f50f..591edd4 100644 --- a/mfvideoin.h +++ b/mfvideoin.h @@ -4,6 +4,8 @@ #include #include +#include +#include #include "base.h" class MfVideoIn : public Base_Video_In @@ -20,6 +22,17 @@ class MfVideoIn : public Base_Video_In virtual void StopDevice(); virtual void CloseDevice(); virtual int GetFrame(unsigned char **buffOut, class FrameMetaData *metaOut); + +protected: + virtual void InitWmf(); + virtual void DeinitWmf(); + + int initDone; + IMFSourceReader* reader; + IMFMediaSource* source; + int asyncMode; + std::string devName; + class SourceReaderCB* readerCallback; }; void *MfVideoIn_Worker_thread(void *arg); diff --git a/videoin.py b/videoin.py new file mode 100644 index 0000000..2a16517 --- /dev/null +++ b/videoin.py @@ -0,0 +1,14 @@ + +import videolive + +if __name__=="__main__": + inManager = videolive.Video_in_manager() + print inManager + + devs = inManager.list_devices() + print devs + + firstDev = open(devs[0]) + print firstDev + + \ No newline at end of file From 6921c01fee41cf728c737ed7697bc1bf05ea3c52 Mon Sep 17 00:00:00 2001 From: TimSC Date: Mon, 18 Nov 2013 15:17:02 +0000 Subject: [PATCH 095/256] Attempt to list sources --- mfvideoin.cpp | 163 ++++++++++++++++++++++++++++++++++++++------------ mfvideoin.h | 17 ++++-- videoin.cpp | 11 +++- 3 files changed, 146 insertions(+), 45 deletions(-) diff --git a/mfvideoin.cpp b/mfvideoin.cpp index a2a7b1a..456df99 100644 --- a/mfvideoin.cpp +++ b/mfvideoin.cpp @@ -461,24 +461,42 @@ class SourceReaderCB : public IMFSourceReaderCallback } }; 
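// (Annotation added for clarity, not part of PATCH 095: the block added below
// replaces the InitWmf()/DeinitWmf() pair and its initDone flag from PATCH 094
// with a small RAII base class. WmfBase's constructor calls MFStartup and
// CoInitializeEx, and its destructor calls MFShutdown and CoUninitialize, so
// any object derived from it has Media Foundation available for its lifetime.)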
+//************************************************************************** + +WmfBase::WmfBase() : Base_Video_In() +{ + HRESULT hr = MFStartup(MF_VERSION); + if(!SUCCEEDED(hr)) + throw std::runtime_error("Media foundation startup failed"); + + hr = CoInitializeEx(NULL, COINIT_MULTITHREADED); + if(!SUCCEEDED(hr)) + throw std::runtime_error("CoInitializeEx failed"); +} + +WmfBase::~WmfBase() +{ + MFShutdown(); + + CoUninitialize(); +} //*************************************************************************** -MfVideoIn::MfVideoIn(const char *devNameIn) : Base_Video_In() +MfVideoIn::MfVideoIn(const char *devNameIn) : WmfBase() { - this->initDone = 0; this->asyncMode = 1; this->devName = devNameIn; this->reader = NULL; this->source = NULL; this->readerCallback = NULL; - this->InitWmf(); } MfVideoIn::~MfVideoIn() { - this->DeinitWmf(); + SafeRelease(&reader); + SafeRelease(&source); } void MfVideoIn::Stop() @@ -523,51 +541,122 @@ int MfVideoIn::GetFrame(unsigned char **buffOut, class FrameMetaData *metaOut) //*************************************************************** -void MfVideoIn::InitWmf() +void *MfVideoIn_Worker_thread(void *arg) { - if(this->initDone) - throw runtime_error("Media Foundation init already done"); - - HRESULT hr = MFStartup(MF_VERSION); - if(!SUCCEEDED(hr)) - throw std::runtime_error("Media foundation startup failed"); - - hr = CoInitializeEx(NULL, COINIT_MULTITHREADED); - if(!SUCCEEDED(hr)) - throw std::runtime_error("CoInitializeEx failed"); + class MfVideoIn *argobj = (class MfVideoIn*) arg; + argobj->Run(); - this->initDone = true; + return NULL; } -void MfVideoIn::DeinitWmf() +//****************************************************************** + +class WmfListDevices : public WmfBase { - if(!this->initDone) - throw runtime_error("Media Foundation init not done"); +public: + WmfListDevices() : WmfBase() + { - SafeRelease(&reader); - reader = NULL; - SafeRelease(&source); - source = NULL; + } - MFShutdown(); + virtual ~WmfListDevices() + { - CoUninitialize(); + } - this->initDone = false; -} + + int EnumDevices(IMFActivate ***ppDevicesOut) + { + //Warning: the result from this function must be manually freed! 
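// (Comment added for clarity, not in the original patch: "manually freed" means
// the caller owns the result of MFEnumDeviceSources. Once the device strings
// have been copied out, each IMFActivate pointer in the array should be
// Release()d and the array itself returned with CoTaskMemFree, roughly:
//
//     for(UINT32 i = 0; i < count; i++)
//         SafeRelease(&ppDevices[i]);
//     CoTaskMemFree(ppDevices);
//
// where count and ppDevices are the values produced by EnumDevices().)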
+ + //Allocate memory to store devices + IMFAttributes *pAttributes = NULL; + *ppDevicesOut = NULL; + HRESULT hr = MFCreateAttributes(&pAttributes, 1); + if(!SUCCEEDED(hr)) + throw std::runtime_error("MFCreateAttributes failed"); + + hr = pAttributes->SetGUID( + MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE, + MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_GUID + ); + if(!SUCCEEDED(hr)) + { + SafeRelease(&pAttributes); + throw std::runtime_error("SetGUID failed"); + } -//************************************************************ + //Get list of devices from media foundation + UINT32 count; + hr = MFEnumDeviceSources(pAttributes, ppDevicesOut, &count); + if(!SUCCEEDED(hr)) + { + SafeRelease(&pAttributes); + throw std::runtime_error("MFEnumDeviceSources failed"); + } -void *MfVideoIn_Worker_thread(void *arg) -{ - class MfVideoIn *argobj = (class MfVideoIn*) arg; - argobj->Run(); + SafeRelease(&pAttributes); + return count; + } - return NULL; -} + std::vector > ListDevices() + { + std::vector > out; + + IMFActivate **ppDevices = NULL; + int count = this->EnumDevices(&ppDevices); + + //For each device + for(int i=0; iGetAllocatedString( + MF_DEVSOURCE_ATTRIBUTE_FRIENDLY_NAME, + &vd_pFriendlyName, + NULL + ); + if(!SUCCEEDED(hr)) + { + SafeRelease(ppDevices); + CoTaskMemFree(vd_pFriendlyName); + throw std::runtime_error("GetAllocatedString failed"); + } + + wchar_t *symbolicLink = NULL; + hr = pActivate->GetAllocatedString( + MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_SYMBOLIC_LINK, + &symbolicLink, + NULL + ); + if(!SUCCEEDED(hr)) + { + SafeRelease(ppDevices); + CoTaskMemFree(vd_pFriendlyName); + CoTaskMemFree(symbolicLink); + throw std::runtime_error("GetAllocatedString failed"); + } + + std::vector src; + src.push_back(symbolicLink); + src.push_back(vd_pFriendlyName); + out.push_back(src); + + CoTaskMemFree(vd_pFriendlyName); + CoTaskMemFree(symbolicLink); + } + + SafeRelease(ppDevices); + return out; + } +}; -std::vector List_in_devices() +std::vector > List_in_devices() { - std::vector out; + class WmfListDevices wmfListDevices; + std::vector > out = wmfListDevices.ListDevices(); + return out; -} \ No newline at end of file +} diff --git a/mfvideoin.h b/mfvideoin.h index 591edd4..5bc68c3 100644 --- a/mfvideoin.h +++ b/mfvideoin.h @@ -8,7 +8,15 @@ #include #include "base.h" -class MfVideoIn : public Base_Video_In +class WmfBase : public Base_Video_In +{ +public: + WmfBase(); + virtual ~WmfBase(); + +}; + +class MfVideoIn : public WmfBase { public: MfVideoIn(const char *devName); @@ -24,10 +32,7 @@ class MfVideoIn : public Base_Video_In virtual int GetFrame(unsigned char **buffOut, class FrameMetaData *metaOut); protected: - virtual void InitWmf(); - virtual void DeinitWmf(); - - int initDone; + IMFSourceReader* reader; IMFMediaSource* source; int asyncMode; @@ -36,6 +41,6 @@ class MfVideoIn : public Base_Video_In }; void *MfVideoIn_Worker_thread(void *arg); -std::vector List_in_devices(); +std::vector > List_in_devices(); #endif //MFVIDEOIN_H diff --git a/videoin.cpp b/videoin.cpp index 1027e8f..a0aecb6 100644 --- a/videoin.cpp +++ b/videoin.cpp @@ -234,10 +234,17 @@ PyObject *Device_manager_close(Device_manager *self, PyObject *args) PyObject *Device_manager_list_devices(Device_manager *self) { PyObject *out = PyList_New(0); - std::vector devLi = List_in_devices(); + std::vector > devLi = List_in_devices(); for(unsigned i=0; i Date: Mon, 18 Nov 2013 15:17:29 +0000 Subject: [PATCH 096/256] Attempt to list sources --- mfvideoin.cpp | 1 + 1 file changed, 1 insertion(+) diff --git a/mfvideoin.cpp 
b/mfvideoin.cpp index 456df99..b7035b7 100644 --- a/mfvideoin.cpp +++ b/mfvideoin.cpp @@ -601,6 +601,7 @@ class WmfListDevices : public WmfBase std::vector > ListDevices() { + cout << "a" << end; std::vector > out; IMFActivate **ppDevices = NULL; From 487f5ef180beefb35498081eef79d72ae533527e Mon Sep 17 00:00:00 2001 From: TimSC Date: Mon, 18 Nov 2013 15:26:45 +0000 Subject: [PATCH 097/256] Attempt to list sources --- mfvideoin.cpp | 7 +++++-- videoin.cpp | 2 ++ videoin.py | 2 +- 3 files changed, 8 insertions(+), 3 deletions(-) diff --git a/mfvideoin.cpp b/mfvideoin.cpp index b7035b7..9a6c5d6 100644 --- a/mfvideoin.cpp +++ b/mfvideoin.cpp @@ -601,12 +601,11 @@ class WmfListDevices : public WmfBase std::vector > ListDevices() { - cout << "a" << end; std::vector > out; IMFActivate **ppDevices = NULL; int count = this->EnumDevices(&ppDevices); - + cout << "count" << count << endl; //For each device for(int i=0; i src; src.push_back(symbolicLink); src.push_back(vd_pFriendlyName); @@ -649,7 +649,10 @@ class WmfListDevices : public WmfBase CoTaskMemFree(symbolicLink); } + cout << "1, " << (long) ppDevices << endl; SafeRelease(ppDevices); + cout << "2" << endl; + return out; } }; diff --git a/videoin.cpp b/videoin.cpp index a0aecb6..b39b6cd 100644 --- a/videoin.cpp +++ b/videoin.cpp @@ -1,4 +1,5 @@ +#include #include "videoin.h" #include #ifdef _NT @@ -235,6 +236,7 @@ PyObject *Device_manager_list_devices(Device_manager *self) { PyObject *out = PyList_New(0); std::vector > devLi = List_in_devices(); + std::cout <<"z"<< std::endl; for(unsigned i=0; i Date: Mon, 18 Nov 2013 15:28:56 +0000 Subject: [PATCH 098/256] Attempt to list sources --- mfvideoin.cpp | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/mfvideoin.cpp b/mfvideoin.cpp index 9a6c5d6..a30af3c 100644 --- a/mfvideoin.cpp +++ b/mfvideoin.cpp @@ -650,7 +650,10 @@ class WmfListDevices : public WmfBase } cout << "1, " << (long) ppDevices << endl; - SafeRelease(ppDevices); + if(ppDevices) + SafeRelease(ppDevices); + else + cout << "skip" << endl; cout << "2" << endl; return out; From 325dc058cebe6f21066b6c32a8304d0c7fafc1c8 Mon Sep 17 00:00:00 2001 From: TimSC Date: Mon, 18 Nov 2013 15:29:44 +0000 Subject: [PATCH 099/256] Avoid free null pointer --- mfvideoin.cpp | 4 ---- 1 file changed, 4 deletions(-) diff --git a/mfvideoin.cpp b/mfvideoin.cpp index a30af3c..5bb988e 100644 --- a/mfvideoin.cpp +++ b/mfvideoin.cpp @@ -649,12 +649,8 @@ class WmfListDevices : public WmfBase CoTaskMemFree(symbolicLink); } - cout << "1, " << (long) ppDevices << endl; if(ppDevices) SafeRelease(ppDevices); - else - cout << "skip" << endl; - cout << "2" << endl; return out; } From 1b69ce5db891a59e1912b26a765c284f5e8f2094 Mon Sep 17 00:00:00 2001 From: TimSC Date: Mon, 18 Nov 2013 15:31:42 +0000 Subject: [PATCH 100/256] Remove debug code --- mfvideoin.cpp | 3 +-- videoin.cpp | 2 +- videoin.py | 4 ++++ 3 files changed, 6 insertions(+), 3 deletions(-) diff --git a/mfvideoin.cpp b/mfvideoin.cpp index 5bb988e..b845dca 100644 --- a/mfvideoin.cpp +++ b/mfvideoin.cpp @@ -605,7 +605,7 @@ class WmfListDevices : public WmfBase IMFActivate **ppDevices = NULL; int count = this->EnumDevices(&ppDevices); - cout << "count" << count << endl; + //For each device for(int i=0; i src; src.push_back(symbolicLink); src.push_back(vd_pFriendlyName); diff --git a/videoin.cpp b/videoin.cpp index b39b6cd..704a017 100644 --- a/videoin.cpp +++ b/videoin.cpp @@ -236,7 +236,7 @@ PyObject *Device_manager_list_devices(Device_manager *self) { PyObject *out = PyList_New(0); 
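// (Comment added for clarity, not in the original diff: after PATCH 095 each
// entry of devLi carries two wide strings, the device's symbolic link followed
// by its friendly name. That is why the test script passes devs[0][0] to
// open(): the symbolic link is what MfVideoIn::OpenDeviceInternal(), added in
// PATCH 104, matches against.)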
std::vector > devLi = List_in_devices(); - std::cout <<"z"<< std::endl; + for(unsigned i=0; i Date: Mon, 18 Nov 2013 15:37:06 +0000 Subject: [PATCH 101/256] Remove debug code --- mfvideoin.cpp | 11 ++++++++++- mfvideoin.h | 1 + 2 files changed, 11 insertions(+), 1 deletion(-) diff --git a/mfvideoin.cpp b/mfvideoin.cpp index b845dca..fd828dc 100644 --- a/mfvideoin.cpp +++ b/mfvideoin.cpp @@ -501,7 +501,7 @@ MfVideoIn::~MfVideoIn() void MfVideoIn::Stop() { - + } void MfVideoIn::WaitForStop() @@ -539,6 +539,15 @@ int MfVideoIn::GetFrame(unsigned char **buffOut, class FrameMetaData *metaOut) return 0; } +void MfVideoIn::Run() +{ + while(1) + { + cout << "running" << endl; + Sleep(10); + } +} + //*************************************************************** void *MfVideoIn_Worker_thread(void *arg) diff --git a/mfvideoin.h b/mfvideoin.h index 5bc68c3..6b7bd39 100644 --- a/mfvideoin.h +++ b/mfvideoin.h @@ -31,6 +31,7 @@ class MfVideoIn : public WmfBase virtual void CloseDevice(); virtual int GetFrame(unsigned char **buffOut, class FrameMetaData *metaOut); + void Run(); protected: IMFSourceReader* reader; From 29668c6c3fa5c8ac8c6bfe29a7ea5ec4019355f9 Mon Sep 17 00:00:00 2001 From: TimSC Date: Mon, 18 Nov 2013 15:50:08 +0000 Subject: [PATCH 102/256] Thread start and stop code --- mfvideoin.cpp | 35 ++++++++++++++++++++++++++++++----- mfvideoin.h | 3 +++ videoin.py | 7 ++++--- 3 files changed, 37 insertions(+), 8 deletions(-) diff --git a/mfvideoin.cpp b/mfvideoin.cpp index fd828dc..fc1431f 100644 --- a/mfvideoin.cpp +++ b/mfvideoin.cpp @@ -491,27 +491,44 @@ MfVideoIn::MfVideoIn(const char *devNameIn) : WmfBase() this->reader = NULL; this->source = NULL; this->readerCallback = NULL; + this->stopping = 0; + this->stopped = 0; + InitializeCriticalSection(&lock); } MfVideoIn::~MfVideoIn() { + this->WaitForStop(); + SafeRelease(&reader); SafeRelease(&source); + DeleteCriticalSection(&lock); } void MfVideoIn::Stop() { - + EnterCriticalSection(&lock); + this->stopping = 1; + LeaveCriticalSection(&lock); } void MfVideoIn::WaitForStop() { + this->Stop(); + int waiting = 1; + while(waiting) + { + EnterCriticalSection(&lock); + waiting = !this->stopped; + LeaveCriticalSection(&lock); + Sleep(10); + } } void MfVideoIn::OpenDevice() { - + cout << "MfVideoIn::OpenDevice()" << endl; } void MfVideoIn::SetFormat(const char *fmt, int width, int height) @@ -521,7 +538,7 @@ void MfVideoIn::SetFormat(const char *fmt, int width, int height) void MfVideoIn::StartDevice(int buffer_count) { - + cout << "MfVideoIn::StartDevice()" << endl; } void MfVideoIn::StopDevice() @@ -531,7 +548,7 @@ void MfVideoIn::StopDevice() void MfVideoIn::CloseDevice() { - + cout << "MfVideoIn::CloseDevice()" << endl; } int MfVideoIn::GetFrame(unsigned char **buffOut, class FrameMetaData *metaOut) @@ -541,11 +558,19 @@ int MfVideoIn::GetFrame(unsigned char **buffOut, class FrameMetaData *metaOut) void MfVideoIn::Run() { + int running = 1; while(1) { - cout << "running" << endl; Sleep(10); + + EnterCriticalSection(&lock); + running = !this->stopping; + LeaveCriticalSection(&lock); } + + EnterCriticalSection(&lock); + this->stopped = 1; + LeaveCriticalSection(&lock); } //*************************************************************** diff --git a/mfvideoin.h b/mfvideoin.h index 6b7bd39..f6e9f8b 100644 --- a/mfvideoin.h +++ b/mfvideoin.h @@ -39,6 +39,9 @@ class MfVideoIn : public WmfBase int asyncMode; std::string devName; class SourceReaderCB* readerCallback; + int stopping; + int stopped; + CRITICAL_SECTION lock; }; void 
*MfVideoIn_Worker_thread(void *arg); diff --git a/videoin.py b/videoin.py index 09ced72..42166b8 100644 --- a/videoin.py +++ b/videoin.py @@ -1,5 +1,5 @@ -import videolive +import videolive, time if __name__=="__main__": inManager = videolive.Video_in_manager() @@ -12,7 +12,8 @@ print "No source devices detected" exit(0) - firstDev = open(devs[0][0]) + firstDev = inManager.open(devs[0][0]) print firstDev - \ No newline at end of file + for i in range(10): + time.sleep(1) From c4f14131bf7639edf9e99aa144bd97da7bfb6df1 Mon Sep 17 00:00:00 2001 From: TimSC Date: Mon, 18 Nov 2013 16:26:02 +0000 Subject: [PATCH 103/256] Thread start and stop code --- mfvideoin.cpp | 28 +++++++++++++++++++++++++--- mfvideoin.h | 4 ++++ 2 files changed, 29 insertions(+), 3 deletions(-) diff --git a/mfvideoin.cpp b/mfvideoin.cpp index fc1431f..1f96b96 100644 --- a/mfvideoin.cpp +++ b/mfvideoin.cpp @@ -493,6 +493,11 @@ MfVideoIn::MfVideoIn(const char *devNameIn) : WmfBase() this->readerCallback = NULL; this->stopping = 0; this->stopped = 0; + + this->openDevFlag = 0; + this->startDevFlag = 0; + this->stopDevFlag = 0; + this->closeDevFlag = 0; InitializeCriticalSection(&lock); } @@ -529,6 +534,9 @@ void MfVideoIn::WaitForStop() void MfVideoIn::OpenDevice() { cout << "MfVideoIn::OpenDevice()" << endl; + EnterCriticalSection(&lock); + this->openDevFlag = 1; + LeaveCriticalSection(&lock); } void MfVideoIn::SetFormat(const char *fmt, int width, int height) @@ -539,16 +547,24 @@ void MfVideoIn::SetFormat(const char *fmt, int width, int height) void MfVideoIn::StartDevice(int buffer_count) { cout << "MfVideoIn::StartDevice()" << endl; + EnterCriticalSection(&lock); + this->startDevFlag = 1; + LeaveCriticalSection(&lock); } void MfVideoIn::StopDevice() { - + EnterCriticalSection(&lock); + this->stopDevFlag = 1; + LeaveCriticalSection(&lock); } void MfVideoIn::CloseDevice() { cout << "MfVideoIn::CloseDevice()" << endl; + EnterCriticalSection(&lock); + this->closeDevFlag = 1; + LeaveCriticalSection(&lock); } int MfVideoIn::GetFrame(unsigned char **buffOut, class FrameMetaData *metaOut) @@ -561,11 +577,17 @@ void MfVideoIn::Run() int running = 1; while(1) { - Sleep(10); - EnterCriticalSection(&lock); running = !this->stopping; LeaveCriticalSection(&lock); + if(!running) continue; + + + + + + + Sleep(10); } EnterCriticalSection(&lock); diff --git a/mfvideoin.h b/mfvideoin.h index f6e9f8b..83ed86b 100644 --- a/mfvideoin.h +++ b/mfvideoin.h @@ -41,6 +41,10 @@ class MfVideoIn : public WmfBase class SourceReaderCB* readerCallback; int stopping; int stopped; + int openDevFlag; + int startDevFlag; + int stopDevFlag; + int closeDevFlag; CRITICAL_SECTION lock; }; From d5719460a870e071dea443c0ce871e001c6f78ef Mon Sep 17 00:00:00 2001 From: TimSC Date: Mon, 18 Nov 2013 17:42:06 +0000 Subject: [PATCH 104/256] Camera is now active --- mfvideoin.cpp | 285 +++++++++++++++++++++++++++++++++++++++++++------- mfvideoin.h | 8 +- videoin.cpp | 9 +- videoin.py | 7 +- 4 files changed, 264 insertions(+), 45 deletions(-) diff --git a/mfvideoin.cpp b/mfvideoin.cpp index 1f96b96..b638e09 100644 --- a/mfvideoin.cpp +++ b/mfvideoin.cpp @@ -13,6 +13,7 @@ using namespace std; #include "mfvideoin.h" #define MAX_DEVICE_ID_LEN 100 +int EnumDevices(IMFActivate ***ppDevicesOut); template void SafeRelease(T **ppT) { @@ -484,7 +485,7 @@ WmfBase::~WmfBase() //*************************************************************************** -MfVideoIn::MfVideoIn(const char *devNameIn) : WmfBase() +MfVideoIn::MfVideoIn(const wchar_t *devNameIn) : WmfBase() { 
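	// (Comment added for clarity, not in the original diff: the public
	// OpenDevice/StartDevice/StopDevice/CloseDevice methods only raise their
	// flags while holding `lock`; the worker thread started through
	// MfVideoIn_Worker_thread() consumes the flags in Run() and performs the
	// actual Media Foundation calls, so the calling thread is never blocked on
	// device operations.)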
this->asyncMode = 1; this->devName = devNameIn; @@ -572,29 +573,234 @@ int MfVideoIn::GetFrame(unsigned char **buffOut, class FrameMetaData *metaOut) return 0; } +//*************************************************************** + void MfVideoIn::Run() { int running = 1; - while(1) + try + { + while(running) { EnterCriticalSection(&lock); running = !this->stopping; + int openDevFlagTmp = this->openDevFlag; + this->openDevFlag = 0; + int startDevFlagTmp = this->startDevFlag; + this->startDevFlag = 0; + int stopDevFlagTmp = this->stopDevFlag; + this->stopDevFlag = 0; + int closeDevFlagTmp = this->closeDevFlag; + this->closeDevFlag = 0; LeaveCriticalSection(&lock); if(!running) continue; + if(openDevFlagTmp) + { + this->OpenDeviceInternal(); + } + if(startDevFlagTmp) + { + this->StartDeviceInternal(); + } - - + if(this->reader != NULL) + this->ReadFramesInternal(); Sleep(10); } + } + catch(std::exception &err) + { + cout << err.what() << endl; + } EnterCriticalSection(&lock); this->stopped = 1; LeaveCriticalSection(&lock); } +void MfVideoIn::OpenDeviceInternal() +{ + //Check if source is already available + if(this->source != NULL) + throw runtime_error("Device already open"); + + //Open a new source + IMFActivate **ppDevices = NULL; + int count = EnumDevices(&ppDevices); + int devIndex = -1; + + //Find device + for(int i=0; iGetAllocatedString( + MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_SYMBOLIC_LINK, + &symbolicLink, + NULL + ); + if(!SUCCEEDED(hr)) + { + SafeRelease(ppDevices); + throw std::runtime_error("GetAllocatedString failed"); + } + + if(wcscmp(symbolicLink, this->devName.c_str())==0) + { + devIndex = i; + } + CoTaskMemFree(symbolicLink); + } + + if(devIndex == -1) + throw runtime_error("Device not found"); + + IMFActivate *pActivate = ppDevices[devIndex]; + + //Activate device object + IMFMediaSource *sourceTmp = NULL; + HRESULT hr = pActivate->ActivateObject( + __uuidof(IMFMediaSource), + (void**)&sourceTmp + ); + if(!SUCCEEDED(hr)) + { + SafeRelease(ppDevices); + throw std::runtime_error("ActivateObject failed"); + } + + this->source = sourceTmp; + + SafeRelease(ppDevices); +} + +void MfVideoIn::StartDeviceInternal() +{ + //Create reader + IMFAttributes *pAttributes = NULL; + HRESULT hr = MFCreateAttributes(&pAttributes, 1); + if(!SUCCEEDED(hr)) + throw std::runtime_error("MFCreateAttributes failed"); + + if(source==NULL) + throw std::runtime_error("Source not open"); + + //Set attributes for reader + if(this->asyncMode) + { + this->readerCallback = new SourceReaderCB(); + + hr = pAttributes->SetUnknown(MF_SOURCE_READER_ASYNC_CALLBACK, this->readerCallback); + } + + IMFSourceReader *readerTmp = NULL; + hr = MFCreateSourceReaderFromMediaSource(this->source, pAttributes, &readerTmp); + if(!SUCCEEDED(hr)) + { + SafeRelease(&pAttributes); + throw std::runtime_error("MFCreateSourceReaderFromMediaSource failed"); + } + + this->reader = readerTmp; + + SafeRelease(&pAttributes); +} + +void MfVideoIn::ReadFramesInternal() +{ + //Check if reader is ready + if(this->reader == NULL) + throw std::runtime_error("Reader not ready for this source"); + + HRESULT hr = S_OK; + IMFSample *pSample = NULL; + DWORD streamIndex=0, flags=0; + LONGLONG llTimeStamp=0; + + if(this->asyncMode) + { + if(!this->readerCallback->GetPending()) + { + hr = this->reader->ReadSample( + MF_SOURCE_READER_ANY_STREAM, // Stream index. 
+ 0, NULL, NULL, NULL, NULL + ); + this->readerCallback->SetPending(); + } + + HRESULT hrStatus = S_OK; + DWORD dwStreamIndex = 0; + DWORD dwStreamFlags = 0; + LONGLONG llTimestamp = 0; + char *frame = NULL; + DWORD buffLen = 0; + + int found = this->readerCallback->GetFrame(&hrStatus, &dwStreamIndex, + &dwStreamFlags, &llTimestamp, &frame, &buffLen); + + //cout << (long) frame << "," << buffLen << endl; + if(found) + { + if((frame == NULL) != (buffLen == 0)) + throw runtime_error("Frame buffer corruption detected"); + /*PyObject* out = StaticObjToPythonObj(this->reader, + streamIndex, + flags, + llTimeStamp, + frame, buffLen);*/ + if(frame) delete [] frame; + + //SetSampleMetaData(this->reader, streamIndex, out); + + + return; + } + else + return; + } + else + { + hr = this->reader->ReadSample( + MF_SOURCE_READER_ANY_STREAM, // Stream index. + 0, // Flags. + &streamIndex, // Receives the actual stream index. + &flags, // Receives status flags. + &llTimeStamp, // Receives the time stamp. + &pSample // Receives the sample or NULL. + ); + + if (FAILED(hr)) + { + return; + } + + if(pSample!=NULL) + { + char *frame = NULL; + DWORD buffLen = SampleToStaticObj(pSample, &frame); + + /*PyObject* out = StaticObjToPythonObj(pReader, + streamIndex, + flags, + llTimeStamp, + frame, buffLen);*/ + + //SetSampleMetaData(pReader, streamIndex, out); + + + pSample->Release(); + if(frame != NULL) delete [] frame; + return; + } + + if(pSample) pSample->Release(); + } + +} + //*************************************************************** void *MfVideoIn_Worker_thread(void *arg) @@ -607,52 +813,51 @@ void *MfVideoIn_Worker_thread(void *arg) //****************************************************************** -class WmfListDevices : public WmfBase +int EnumDevices(IMFActivate ***ppDevicesOut) { -public: - WmfListDevices() : WmfBase() - { + //Warning: the result from this function must be manually freed! - } + //Allocate memory to store devices + IMFAttributes *pAttributes = NULL; + *ppDevicesOut = NULL; + HRESULT hr = MFCreateAttributes(&pAttributes, 1); + if(!SUCCEEDED(hr)) + throw std::runtime_error("MFCreateAttributes failed"); - virtual ~WmfListDevices() + hr = pAttributes->SetGUID( + MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE, + MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_GUID + ); + if(!SUCCEEDED(hr)) { - + SafeRelease(&pAttributes); + throw std::runtime_error("SetGUID failed"); } - - int EnumDevices(IMFActivate ***ppDevicesOut) + //Get list of devices from media foundation + UINT32 count; + hr = MFEnumDeviceSources(pAttributes, ppDevicesOut, &count); + if(!SUCCEEDED(hr)) { - //Warning: the result from this function must be manually freed! 
+ SafeRelease(&pAttributes); + throw std::runtime_error("MFEnumDeviceSources failed"); + } - //Allocate memory to store devices - IMFAttributes *pAttributes = NULL; - *ppDevicesOut = NULL; - HRESULT hr = MFCreateAttributes(&pAttributes, 1); - if(!SUCCEEDED(hr)) - throw std::runtime_error("MFCreateAttributes failed"); + SafeRelease(&pAttributes); + return count; +} - hr = pAttributes->SetGUID( - MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE, - MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_GUID - ); - if(!SUCCEEDED(hr)) - { - SafeRelease(&pAttributes); - throw std::runtime_error("SetGUID failed"); - } +class WmfListDevices : public WmfBase +{ +public: + WmfListDevices() : WmfBase() + { - //Get list of devices from media foundation - UINT32 count; - hr = MFEnumDeviceSources(pAttributes, ppDevicesOut, &count); - if(!SUCCEEDED(hr)) - { - SafeRelease(&pAttributes); - throw std::runtime_error("MFEnumDeviceSources failed"); - } + } + + virtual ~WmfListDevices() + { - SafeRelease(&pAttributes); - return count; } std::vector > ListDevices() @@ -660,7 +865,7 @@ class WmfListDevices : public WmfBase std::vector > out; IMFActivate **ppDevices = NULL; - int count = this->EnumDevices(&ppDevices); + int count = EnumDevices(&ppDevices); //For each device for(int i=0; ithreadArgStore)[devarg] = threadArgs; diff --git a/videoin.py b/videoin.py index 42166b8..1fd7fb6 100644 --- a/videoin.py +++ b/videoin.py @@ -12,8 +12,11 @@ print "No source devices detected" exit(0) - firstDev = inManager.open(devs[0][0]) - print firstDev + inManager.open(devs[0][0]) + + time.sleep(1) + inManager.start(devs[0][0]) + for i in range(10): time.sleep(1) From 20a91901e6312d3347b26c10ea2817fc7aa214aa Mon Sep 17 00:00:00 2001 From: Tim Sheerman-Chase Date: Mon, 18 Nov 2013 18:13:21 +0000 Subject: [PATCH 105/256] Update linux api --- v4l2capture.cpp | 23 ++++++++++++++++++++--- v4l2capture.h | 2 +- videoin.py | 21 ++++++++++++++++++--- 3 files changed, 39 insertions(+), 7 deletions(-) diff --git a/v4l2capture.cpp b/v4l2capture.cpp index 1386d4d..07e076b 100644 --- a/v4l2capture.cpp +++ b/v4l2capture.cpp @@ -14,6 +14,7 @@ #include #include #include +#include #include "pixfmt.h" @@ -82,6 +83,18 @@ int my_ioctl(int fd, int request, void *arg, int utimeout = -1) } } +std::wstring CharArrayToWString(const char *in) +{ + size_t inLen = strlen(in)+1; + wchar_t *tmpDevName = new wchar_t[inLen]; + //size_t returnValue; + mbstowcs(tmpDevName, in, inLen); + //mbstowcs_s(&returnValue, tmpDevName, inLen, in, inLen); + std::wstring tmpDevName2(tmpDevName); + delete [] tmpDevName; + return tmpDevName2; +} + // ************************************************************************** Video_in_Manager::Video_in_Manager(const char *devNameIn) : Base_Video_In() @@ -619,9 +632,9 @@ void *Video_in_Worker_thread(void *arg) return NULL; } -std::vector List_in_devices() +std::vector > List_in_devices() { - std::vector out; + std::vector > out; const char dir[] = "/dev"; DIR *dp; struct dirent *dirp; @@ -633,8 +646,12 @@ std::vector List_in_devices() while ((dirp = readdir(dp)) != NULL) { if (strncmp(dirp->d_name, "video", 5) != 0) continue; std::string tmp = "/dev/"; + std::vector row; tmp.append(dirp->d_name); - out.push_back(tmp); + + std::wstring tmpDevName = CharArrayToWString(tmp.c_str()); + row.push_back(tmpDevName); + out.push_back(row); } closedir(dp); return out; diff --git a/v4l2capture.h b/v4l2capture.h index ac725ec..d87ab3c 100644 --- a/v4l2capture.h +++ b/v4l2capture.h @@ -124,7 +124,7 @@ class Video_in_Manager : public Base_Video_In void 
*Video_in_Worker_thread(void *arg); -std::vector List_in_devices(); +std::vector > List_in_devices(); // ********************************************************************** diff --git a/videoin.py b/videoin.py index 1fd7fb6..a5aa754 100644 --- a/videoin.py +++ b/videoin.py @@ -16,7 +16,22 @@ time.sleep(1) inManager.start(devs[0][0]) - + count = 0 + + while count < 10: + time.sleep(0.01) + frame = inManager.get_frame(devs[0][0]) + if frame is None: continue + print len(frame[0]), frame[1] + count += 1 + + inManager.stop(devs[0][0]) + + time.sleep(1) + + inManager.close(devs[0][0]) + + time.sleep(1) + + del inManager - for i in range(10): - time.sleep(1) From c311c0e494bd05e62d4022b0198e6e1d7a41e97b Mon Sep 17 00:00:00 2001 From: TimSC Date: Tue, 19 Nov 2013 10:43:47 +0000 Subject: [PATCH 106/256] Stop device in a clean way --- mfvideoin.cpp | 35 +++++++++++++++++++++++++++++++---- mfvideoin.h | 2 ++ 2 files changed, 33 insertions(+), 4 deletions(-) diff --git a/mfvideoin.cpp b/mfvideoin.cpp index b638e09..2bbfb81 100644 --- a/mfvideoin.cpp +++ b/mfvideoin.cpp @@ -596,18 +596,20 @@ void MfVideoIn::Run() if(!running) continue; if(openDevFlagTmp) - { this->OpenDeviceInternal(); - } if(startDevFlagTmp) - { this->StartDeviceInternal(); - } if(this->reader != NULL) this->ReadFramesInternal(); + if(stopDevFlagTmp) + this->StopDeviceInternal(); + + if(closeDevFlagTmp) + this->CloseDeviceInternal(); + Sleep(10); } } @@ -801,6 +803,31 @@ void MfVideoIn::ReadFramesInternal() } +void MfVideoIn::StopDeviceInternal() +{ + cout << "MfVideoIn::StopDeviceInternal()" << endl; + if(this->reader == NULL) + throw runtime_error("Device is not running"); + + //Shut down reader + SafeRelease(&this->reader); + + //Reader callback seems to automatically delete + this->readerCallback = NULL; + +} + +void MfVideoIn::CloseDeviceInternal() +{ + cout << "MfVideoIn::CloseDeviceInternal()" << endl; + + if(this->source == NULL) + throw runtime_error("Device is not open"); + + //Shut down source + SafeRelease(&this->source); +} + //*************************************************************** void *MfVideoIn_Worker_thread(void *arg) diff --git a/mfvideoin.h b/mfvideoin.h index ded0aba..6f3f81c 100644 --- a/mfvideoin.h +++ b/mfvideoin.h @@ -50,6 +50,8 @@ class MfVideoIn : public WmfBase void OpenDeviceInternal(); void StartDeviceInternal(); void ReadFramesInternal(); + void StopDeviceInternal(); + void CloseDeviceInternal(); }; void *MfVideoIn_Worker_thread(void *arg); From 92a15aa98beef863358e13a7779b6fdfdff9a59e Mon Sep 17 00:00:00 2001 From: TimSC Date: Tue, 19 Nov 2013 10:50:26 +0000 Subject: [PATCH 107/256] Give camera time to stop --- mfvideoin.cpp | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/mfvideoin.cpp b/mfvideoin.cpp index 2bbfb81..cf184d5 100644 --- a/mfvideoin.cpp +++ b/mfvideoin.cpp @@ -583,7 +583,7 @@ void MfVideoIn::Run() while(running) { EnterCriticalSection(&lock); - running = !this->stopping; + running = !this->stopping || this->stopDevFlag || this->closeDevFlag; int openDevFlagTmp = this->openDevFlag; this->openDevFlag = 0; int startDevFlagTmp = this->startDevFlag; @@ -805,7 +805,6 @@ void MfVideoIn::ReadFramesInternal() void MfVideoIn::StopDeviceInternal() { - cout << "MfVideoIn::StopDeviceInternal()" << endl; if(this->reader == NULL) throw runtime_error("Device is not running"); @@ -819,8 +818,6 @@ void MfVideoIn::StopDeviceInternal() void MfVideoIn::CloseDeviceInternal() { - cout << "MfVideoIn::CloseDeviceInternal()" << endl; - if(this->source == NULL) throw 
runtime_error("Device is not open"); From 28edf7faae49acd300b187c7aa03993707ca8819 Mon Sep 17 00:00:00 2001 From: TimSC Date: Tue, 19 Nov 2013 10:50:50 +0000 Subject: [PATCH 108/256] Remove old test scripts --- capture_picture.py | 41 ----------------------------- capture_picture_delayed.py | 54 -------------------------------------- list_devices.py | 28 -------------------- 3 files changed, 123 deletions(-) delete mode 100755 capture_picture.py delete mode 100755 capture_picture_delayed.py delete mode 100755 list_devices.py diff --git a/capture_picture.py b/capture_picture.py deleted file mode 100755 index 6e47d25..0000000 --- a/capture_picture.py +++ /dev/null @@ -1,41 +0,0 @@ -#!/usr/bin/python -# python-v4l2capture -# Python extension to capture video with video4linux2 -# -# 2009, 2010, 2011 Fredrik Portstrom, released into the public domain -# 2011, Joakim Gebart -# 2013, Tim Sheerman-Chase -# See README for license - -import Image -import select -import v4l2capture - -# Open the video device. -video = v4l2capture.Video_device("/dev/video0") - -# Suggest an image size to the device. The device may choose and -# return another size if it doesn't support the suggested one. -size_x, size_y = video.set_format(1280, 1024) - -# Create a buffer to store image data in. This must be done before -# calling 'start' if v4l2capture is compiled with libv4l2. Otherwise -# raises IOError. -video.create_buffers(1) - -# Send the buffer to the device. Some devices require this to be done -# before calling 'start'. -video.queue_all_buffers() - -# Start the device. This lights the LED if it's a camera that has one. -video.start() - -# Wait for the device to fill the buffer. -select.select((video,), (), ()) - -# The rest is easy :-) -image_data = video.read() -video.close() -image = Image.fromstring("RGB", (size_x, size_y), image_data) -image.save("image.jpg") -print "Saved image.jpg (Size: " + str(size_x) + " x " + str(size_y) + ")" diff --git a/capture_picture_delayed.py b/capture_picture_delayed.py deleted file mode 100755 index 0f2b988..0000000 --- a/capture_picture_delayed.py +++ /dev/null @@ -1,54 +0,0 @@ -#!/usr/bin/python -# -# python-v4l2capture -# -# This file is an example on how to capture a picture with -# python-v4l2capture. It waits between starting the video device and -# capturing the picture, to get a good picture from cameras that -# require a delay to get enough brightness. It does not work with some -# devices that require starting to capture pictures immediatly when -# the device is started. -# -# python-v4l2capture -# Python extension to capture video with video4linux2 -# -# 2009, 2010, 2011 Fredrik Portstrom, released into the public domain -# 2011, Joakim Gebart -# 2013, Tim Sheerman-Chase -# See README for license - -import Image -import select -import time -import v4l2capture - -# Open the video device. -video = v4l2capture.Video_device("/dev/video0") - -# Suggest an image size to the device. The device may choose and -# return another size if it doesn't support the suggested one. -size_x, size_y = video.set_format(1280, 1024) - -# Create a buffer to store image data in. This must be done before -# calling 'start' if v4l2capture is compiled with libv4l2. Otherwise -# raises IOError. -video.create_buffers(1) - -# Start the device. This lights the LED if it's a camera that has one. -video.start() - -# Wait a little. Some cameras take a few seconds to get bright enough. -time.sleep(2) - -# Send the buffer to the device. 
-video.queue_all_buffers() - -# Wait for the device to fill the buffer. -select.select((video,), (), ()) - -# The rest is easy :-) -image_data = video.read() -video.close() -image = Image.fromstring("RGB", (size_x, size_y), image_data) -image.save("image.jpg") -print "Saved image.jpg (Size: " + str(size_x) + " x " + str(size_y) + ")" diff --git a/list_devices.py b/list_devices.py deleted file mode 100755 index 75f3820..0000000 --- a/list_devices.py +++ /dev/null @@ -1,28 +0,0 @@ -#!/usr/bin/python -# -# python-v4l2capture -# -# python-v4l2capture -# Python extension to capture video with video4linux2 -# -# 2009, 2010, 2011 Fredrik Portstrom, released into the public domain -# 2011, Joakim Gebart -# 2013, Tim Sheerman-Chase -# See README for license - -import os -import v4l2capture -file_names = [x for x in os.listdir("/dev") if x.startswith("video")] -file_names.sort() -for file_name in file_names: - path = "/dev/" + file_name - print path - try: - video = v4l2capture.Video_device(path) - driver, card, bus_info, capabilities = video.get_info() - print " driver: %s\n card: %s" \ - "\n bus info: %s\n capabilities: %s" % ( - driver, card, bus_info, ", ".join(capabilities)) - video.close() - except IOError, e: - print " " + str(e) From b89479dd8614fdd3ce0b54f2fe6b5d04528afae2 Mon Sep 17 00:00:00 2001 From: TimSC Date: Tue, 19 Nov 2013 11:39:07 +0000 Subject: [PATCH 109/256] Frame is returned but not meta data --- mfvideoin.cpp | 103 +++++++++++++++++++++++++++++++++++++++++--------- mfvideoin.h | 8 ++++ 2 files changed, 94 insertions(+), 17 deletions(-) diff --git a/mfvideoin.cpp b/mfvideoin.cpp index cf184d5..191e23e 100644 --- a/mfvideoin.cpp +++ b/mfvideoin.cpp @@ -499,6 +499,7 @@ MfVideoIn::MfVideoIn(const wchar_t *devNameIn) : WmfBase() this->startDevFlag = 0; this->stopDevFlag = 0; this->closeDevFlag = 0; + this->maxBuffSize = 10; InitializeCriticalSection(&lock); } @@ -570,7 +571,32 @@ void MfVideoIn::CloseDevice() int MfVideoIn::GetFrame(unsigned char **buffOut, class FrameMetaData *metaOut) { - return 0; + if(buffOut==NULL) + throw runtime_error("Buffer ptr cannot be null"); + if(metaOut==NULL) + throw runtime_error("Meta data pointer cannot be null"); + + if(this->frameBuff.size() == 0) + return 0; + + *buffOut = (unsigned char *)this->frameBuff[0]; + + metaOut->fmt = "RGB24"; + metaOut->width; + metaOut->height; + metaOut->buffLen = this->frameLenBuff[0]; + metaOut->sequence; + metaOut->tv_sec; + metaOut->tv_usec; + + this->frameBuff.erase(this->frameBuff.begin()); + this->frameLenBuff.erase(this->frameLenBuff.begin()); + this->hrStatusBuff.erase(this->hrStatusBuff.begin()); + this->dwStreamIndexBuff.erase(this->dwStreamIndexBuff.begin()); + this->dwStreamFlagsBuff.erase(this->dwStreamFlagsBuff.begin()); + this->llTimestampBuff.erase(this->llTimestampBuff.begin()); + + return 1; } //*************************************************************** @@ -748,16 +774,36 @@ void MfVideoIn::ReadFramesInternal() { if((frame == NULL) != (buffLen == 0)) throw runtime_error("Frame buffer corruption detected"); - /*PyObject* out = StaticObjToPythonObj(this->reader, - streamIndex, - flags, - llTimeStamp, - frame, buffLen);*/ - if(frame) delete [] frame; - - //SetSampleMetaData(this->reader, streamIndex, out); - - + + EnterCriticalSection(&lock); + + //Ensure the buffer does not overflow + while(this->frameBuff.size() >= this->maxBuffSize) + { + this->frameBuff.erase(this->frameBuff.begin()); + this->frameLenBuff.erase(this->frameLenBuff.begin()); + 
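			//Buffering note: frameBuff, frameLenBuff, hrStatusBuff, dwStreamIndexBuff,
			//dwStreamFlagsBuff and llTimestampBuff act as one FIFO of frames plus their
			//per-frame metadata, so the front element of every vector has to be erased
			//together to keep the indices aligned. maxBuffSize (10, set in the
			//constructor) bounds memory use when GetFrame() is polled more slowly than
			//the camera delivers samples; the oldest frames are simply dropped.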
this->hrStatusBuff.erase(this->hrStatusBuff.begin()); + this->dwStreamIndexBuff.erase(this->dwStreamIndexBuff.begin()); + this->dwStreamFlagsBuff.erase(this->dwStreamFlagsBuff.begin()); + this->llTimestampBuff.erase(this->llTimestampBuff.begin()); + } + + //Copy frame to output buffer + if(this->frameBuff.size() < this->maxBuffSize) + { + this->frameBuff.push_back(frame); + this->frameLenBuff.push_back(buffLen); + this->hrStatusBuff.push_back(hrStatus); + this->dwStreamIndexBuff.push_back(dwStreamIndex); + this->dwStreamFlagsBuff.push_back(dwStreamFlags); + this->llTimestampBuff.push_back(llTimestamp); + } + else + { + delete [] frame; + } + + LeaveCriticalSection(&lock); return; } else @@ -784,17 +830,40 @@ void MfVideoIn::ReadFramesInternal() char *frame = NULL; DWORD buffLen = SampleToStaticObj(pSample, &frame); - /*PyObject* out = StaticObjToPythonObj(pReader, - streamIndex, - flags, - llTimeStamp, - frame, buffLen);*/ + EnterCriticalSection(&lock); + + //Ensure the buffer does not overflow + while(this->frameBuff.size() >= this->maxBuffSize) + { + this->frameBuff.erase(this->frameBuff.begin()); + this->frameLenBuff.erase(this->frameLenBuff.begin()); + this->hrStatusBuff.erase(this->hrStatusBuff.begin()); + this->dwStreamIndexBuff.erase(this->dwStreamIndexBuff.begin()); + this->dwStreamFlagsBuff.erase(this->dwStreamFlagsBuff.begin()); + this->llTimestampBuff.erase(this->llTimestampBuff.begin()); + } + + //Copy frame to output buffer + if(this->frameBuff.size() < this->maxBuffSize) + { + this->frameBuff.push_back(frame); + this->frameLenBuff.push_back(buffLen); + this->hrStatusBuff.push_back(hr); + this->dwStreamIndexBuff.push_back(streamIndex); + this->dwStreamFlagsBuff.push_back(flags); + this->llTimestampBuff.push_back(llTimeStamp); + } + else + { + delete [] frame; + } + + LeaveCriticalSection(&lock); //SetSampleMetaData(pReader, streamIndex, out); pSample->Release(); - if(frame != NULL) delete [] frame; return; } diff --git a/mfvideoin.h b/mfvideoin.h index 6f3f81c..b05a4a3 100644 --- a/mfvideoin.h +++ b/mfvideoin.h @@ -46,6 +46,14 @@ class MfVideoIn : public WmfBase int stopDevFlag; int closeDevFlag; CRITICAL_SECTION lock; + unsigned maxBuffSize; + + std::vector frameBuff; + std::vector frameLenBuff; + std::vector hrStatusBuff; + std::vector dwStreamIndexBuff; + std::vector dwStreamFlagsBuff; + std::vector llTimestampBuff; void OpenDeviceInternal(); void StartDeviceInternal(); From 32497ea4be015c162982c5a0bc795e680a74c61d Mon Sep 17 00:00:00 2001 From: TimSC Date: Tue, 19 Nov 2013 15:58:47 +0000 Subject: [PATCH 110/256] Added meta data but enabled code crashes --- mfvideoin.cpp | 112 +++++++++++++++++++++++++++++--------------------- mfvideoin.h | 7 ++++ setup.py | 15 ++++++- 3 files changed, 86 insertions(+), 48 deletions(-) diff --git a/mfvideoin.cpp b/mfvideoin.cpp index 191e23e..3d9125f 100644 --- a/mfvideoin.cpp +++ b/mfvideoin.cpp @@ -268,46 +268,6 @@ DWORD SampleToStaticObj(IMFSample *pSample, char **buff) return pcbCurrentLength; } -/*void SetSampleMetaData(IMFSourceReader *pReader, DWORD streamIndex, PyObject *out) -{ - //Set meta data in output object - IMFMediaType *pCurrentType = NULL; - LONG plStride = 0; - GUID majorType=GUID_NULL, subType=GUID_NULL; - UINT32 width = 0; - UINT32 height = 0; - - HRESULT hr = pReader->GetCurrentMediaType(streamIndex, &pCurrentType); - if(!SUCCEEDED(hr)) cout << "Error 3\n"; - BOOL isComp = FALSE; - hr = pCurrentType->IsCompressedFormat(&isComp); - PyDict_SetItemStringAndDeleteVar(out, "isCompressed", PyBool_FromLong(isComp)); - hr = 
pCurrentType->GetGUID(MF_MT_MAJOR_TYPE, &majorType); - LPCWSTR typePtr = GetGUIDNameConst(majorType); - if(!SUCCEEDED(hr)) cout << "Error 4\n"; - hr = pCurrentType->GetGUID(MF_MT_SUBTYPE, &subType); - if(!SUCCEEDED(hr)) cout << "Error 5\n"; - int isVideo = (majorType==MFMediaType_Video); - if(isVideo) - { - GetDefaultStride(pCurrentType, &plStride); - hr = MFGetAttributeSize(pCurrentType, MF_MT_FRAME_SIZE, &width, &height); - if(!SUCCEEDED(hr)) cout << "Error 20\n"; - } - - LPCWSTR subTypePtr = GetGUIDNameConst(subType); - //if(subTypePtr!=0) wcout << "subtype\t" << subTypePtr << "\n"; - - PyDict_SetItemStringAndDeleteVar(out, "isCompressed", PyBool_FromLong(isComp)); - if(typePtr!=NULL) PyDict_SetItemStringAndDeleteVar(out, "type", PyUnicode_FromWideChar(typePtr, wcslen(typePtr))); - if(subTypePtr!=NULL) PyDict_SetItemStringAndDeleteVar(out, "subtype", PyUnicode_FromWideChar(subTypePtr, wcslen(subTypePtr))); - if(!isComp) PyDict_SetItemStringAndDeleteVar(out, "stride", PyInt_FromLong(plStride)); - PyDict_SetItemStringAndDeleteVar(out, "width", PyInt_FromLong(width)); - PyDict_SetItemStringAndDeleteVar(out, "height", PyInt_FromLong(height)); - -} -*/ - class SourceReaderCB : public IMFSourceReaderCallback { //http://msdn.microsoft.com/en-us/library/windows/desktop/gg583871%28v=vs.85%29.aspx @@ -585,9 +545,9 @@ int MfVideoIn::GetFrame(unsigned char **buffOut, class FrameMetaData *metaOut) metaOut->width; metaOut->height; metaOut->buffLen = this->frameLenBuff[0]; - metaOut->sequence; - metaOut->tv_sec; - metaOut->tv_usec; + metaOut->sequence = 0; + metaOut->tv_sec = (unsigned long)(this->llTimestampBuff[0] / 1e7); //in 100-nanosecond units + metaOut->tv_usec = (unsigned long)((this->llTimestampBuff[0] - metaOut->tv_sec * 1e7) / 10); this->frameBuff.erase(this->frameBuff.begin()); this->frameLenBuff.erase(this->frameLenBuff.begin()); @@ -596,6 +556,13 @@ int MfVideoIn::GetFrame(unsigned char **buffOut, class FrameMetaData *metaOut) this->dwStreamFlagsBuff.erase(this->dwStreamFlagsBuff.begin()); this->llTimestampBuff.erase(this->llTimestampBuff.begin()); + /*this->plStrideBuff.erase(this->plStrideBuff.begin()); + this->majorTypeBuff.erase(this->majorTypeBuff.begin()); + this->subTypeBuff.erase(this->subTypeBuff.begin()); + this->widthBuff.erase(this->widthBuff.begin()); + this->heightBuff.erase(this->heightBuff.begin()); + this->isCompressedBuff.erase(this->isCompressedBuff.begin());*/ + return 1; } @@ -737,6 +704,44 @@ void MfVideoIn::StartDeviceInternal() SafeRelease(&pAttributes); } +void MfVideoIn::SetSampleMetaData(DWORD streamIndex) +{ + //Set meta data in output object + /*IMFMediaType *pCurrentType = NULL; + LONG plStride = 0; + GUID majorType=GUID_NULL, subType=GUID_NULL; + UINT32 width = 0; + UINT32 height = 0; + + HRESULT hr = this->reader->GetCurrentMediaType(streamIndex, &pCurrentType); + if(!SUCCEEDED(hr)) cout << "Error 3\n"; + BOOL isComp = FALSE; + hr = pCurrentType->IsCompressedFormat(&isComp); + //PyDict_SetItemStringAndDeleteVar(out, "isCompressed", PyBool_FromLong(isComp)); + hr = pCurrentType->GetGUID(MF_MT_MAJOR_TYPE, &majorType); + LPCWSTR typePtr = GetGUIDNameConst(majorType); + if(!SUCCEEDED(hr)) cout << "Error 4\n"; + hr = pCurrentType->GetGUID(MF_MT_SUBTYPE, &subType); + if(!SUCCEEDED(hr)) cout << "Error 5\n"; + int isVideo = (majorType==MFMediaType_Video); + if(isVideo) + { + GetDefaultStride(pCurrentType, &plStride); + hr = MFGetAttributeSize(pCurrentType, MF_MT_FRAME_SIZE, &width, &height); + if(!SUCCEEDED(hr)) cout << "Error 20\n"; + } + + LPCWSTR subTypePtr = 
GetGUIDNameConst(subType); + + this->plStrideBuff.push_back(plStride); + this->majorTypeBuff.push_back(typePtr); + this->subTypeBuff.push_back(subTypePtr); + this->widthBuff.push_back(width); + this->heightBuff.push_back(height); + this->isCompressedBuff.push_back(isComp);*/ + +} + void MfVideoIn::ReadFramesInternal() { //Check if reader is ready @@ -786,6 +791,13 @@ void MfVideoIn::ReadFramesInternal() this->dwStreamIndexBuff.erase(this->dwStreamIndexBuff.begin()); this->dwStreamFlagsBuff.erase(this->dwStreamFlagsBuff.begin()); this->llTimestampBuff.erase(this->llTimestampBuff.begin()); + + /*this->plStrideBuff.erase(this->plStrideBuff.begin()); + this->majorTypeBuff.erase(this->majorTypeBuff.begin()); + this->subTypeBuff.erase(this->subTypeBuff.begin()); + this->widthBuff.erase(this->widthBuff.begin()); + this->heightBuff.erase(this->heightBuff.begin()); + this->isCompressedBuff.erase(this->isCompressedBuff.begin());*/ } //Copy frame to output buffer @@ -797,6 +809,8 @@ void MfVideoIn::ReadFramesInternal() this->dwStreamIndexBuff.push_back(dwStreamIndex); this->dwStreamFlagsBuff.push_back(dwStreamFlags); this->llTimestampBuff.push_back(llTimestamp); + + this->SetSampleMetaData(dwStreamIndex); } else { @@ -841,6 +855,13 @@ void MfVideoIn::ReadFramesInternal() this->dwStreamIndexBuff.erase(this->dwStreamIndexBuff.begin()); this->dwStreamFlagsBuff.erase(this->dwStreamFlagsBuff.begin()); this->llTimestampBuff.erase(this->llTimestampBuff.begin()); + + /*this->plStrideBuff.erase(this->plStrideBuff.begin()); + this->majorTypeBuff.erase(this->majorTypeBuff.begin()); + this->subTypeBuff.erase(this->subTypeBuff.begin()); + this->widthBuff.erase(this->widthBuff.begin()); + this->heightBuff.erase(this->heightBuff.begin()); + this->isCompressedBuff.erase(this->isCompressedBuff.begin());*/ } //Copy frame to output buffer @@ -852,6 +873,8 @@ void MfVideoIn::ReadFramesInternal() this->dwStreamIndexBuff.push_back(streamIndex); this->dwStreamFlagsBuff.push_back(flags); this->llTimestampBuff.push_back(llTimeStamp); + + this->SetSampleMetaData(streamIndex); } else { @@ -860,9 +883,6 @@ void MfVideoIn::ReadFramesInternal() LeaveCriticalSection(&lock); - //SetSampleMetaData(pReader, streamIndex, out); - - pSample->Release(); return; } diff --git a/mfvideoin.h b/mfvideoin.h index b05a4a3..ce0e0c3 100644 --- a/mfvideoin.h +++ b/mfvideoin.h @@ -55,8 +55,15 @@ class MfVideoIn : public WmfBase std::vector dwStreamFlagsBuff; std::vector llTimestampBuff; + std::vector plStrideBuff; + std::vector majorTypeBuff, subTypeBuff; + std::vector widthBuff; + std::vector heightBuff; + std::vector isCompressedBuff; + void OpenDeviceInternal(); void StartDeviceInternal(); + void SetSampleMetaData(DWORD streamIndex); void ReadFramesInternal(); void StopDeviceInternal(); void CloseDeviceInternal(); diff --git a/setup.py b/setup.py index 76b48c9..18daa36 100755 --- a/setup.py +++ b/setup.py @@ -13,12 +13,23 @@ from distutils.core import Extension, setup import os +debug = 1 + + if os.name == "nt": + if debug: + c_args=['/Zi', '/EHsc'] + l_args=["/MANIFEST", "/DEBUG"] + else: + c_args=[] + l_args=["/MANIFEST"] + videolive = Extension("videolive", ["pixfmt.cpp", "libvideolive.cpp", "videoout.cpp", "videoin.cpp", "mfvideoin.cpp", "mfvideoout.cpp"], define_macros=[('_'+os.name.upper(), None)], library_dirs=['C:\Dev\Lib\libjpeg-turbo-win\lib', "C:\Dev\Lib\pthreads\pthreads.2"], - include_dirs=['C:\Dev\Lib\libjpeg-turbo-win\include', "C:\Dev\Lib\pthreads\pthreads.2"], - extra_link_args=["/MANIFEST"], + 
include_dirs=['C:\Dev\Lib\libjpeg-turbo-win\include', "C:\Dev\Lib\pthreads\pthreads.2"], + extra_compile_args=c_args, + extra_link_args=l_args, libraries = ["pthreadVC2", "jpeg", "Mfplat", "Mf", "Mfreadwrite", "Ole32", "mfuuid", "Shlwapi"]) else: From b927971bc5afc267eb3778186d5386bb42b66936 Mon Sep 17 00:00:00 2001 From: TimSC Date: Tue, 19 Nov 2013 16:11:27 +0000 Subject: [PATCH 111/256] Reorganise code but still crashes --- mfvideoin.cpp | 40 ++++++++++++++++++---------------------- mfvideoin.h | 1 + 2 files changed, 19 insertions(+), 22 deletions(-) diff --git a/mfvideoin.cpp b/mfvideoin.cpp index 3d9125f..fd39aaf 100644 --- a/mfvideoin.cpp +++ b/mfvideoin.cpp @@ -556,12 +556,7 @@ int MfVideoIn::GetFrame(unsigned char **buffOut, class FrameMetaData *metaOut) this->dwStreamFlagsBuff.erase(this->dwStreamFlagsBuff.begin()); this->llTimestampBuff.erase(this->llTimestampBuff.begin()); - /*this->plStrideBuff.erase(this->plStrideBuff.begin()); - this->majorTypeBuff.erase(this->majorTypeBuff.begin()); - this->subTypeBuff.erase(this->subTypeBuff.begin()); - this->widthBuff.erase(this->widthBuff.begin()); - this->heightBuff.erase(this->heightBuff.begin()); - this->isCompressedBuff.erase(this->isCompressedBuff.begin());*/ + this->PopFrontMetaDataBuff(); return 1; } @@ -732,16 +727,27 @@ void MfVideoIn::SetSampleMetaData(DWORD streamIndex) } LPCWSTR subTypePtr = GetGUIDNameConst(subType); - - this->plStrideBuff.push_back(plStride); - this->majorTypeBuff.push_back(typePtr); - this->subTypeBuff.push_back(subTypePtr); + */ + //this->plStrideBuff.push_back(plStride); + //std::wstring tmp(L"test"); + //this->majorTypeBuff.push_back(tmp); + /*this->subTypeBuff.push_back(subTypePtr); this->widthBuff.push_back(width); this->heightBuff.push_back(height); this->isCompressedBuff.push_back(isComp);*/ } +void MfVideoIn::PopFrontMetaDataBuff() +{ + //this->plStrideBuff.erase(this->plStrideBuff.begin()); + //if(this->majorTypeBuff.size()>0) this->majorTypeBuff.erase(this->majorTypeBuff.begin()); + /*this->subTypeBuff.erase(this->subTypeBuff.begin()); + this->widthBuff.erase(this->widthBuff.begin()); + this->heightBuff.erase(this->heightBuff.begin()); + this->isCompressedBuff.erase(this->isCompressedBuff.begin());*/ +} + void MfVideoIn::ReadFramesInternal() { //Check if reader is ready @@ -792,12 +798,7 @@ void MfVideoIn::ReadFramesInternal() this->dwStreamFlagsBuff.erase(this->dwStreamFlagsBuff.begin()); this->llTimestampBuff.erase(this->llTimestampBuff.begin()); - /*this->plStrideBuff.erase(this->plStrideBuff.begin()); - this->majorTypeBuff.erase(this->majorTypeBuff.begin()); - this->subTypeBuff.erase(this->subTypeBuff.begin()); - this->widthBuff.erase(this->widthBuff.begin()); - this->heightBuff.erase(this->heightBuff.begin()); - this->isCompressedBuff.erase(this->isCompressedBuff.begin());*/ + this->PopFrontMetaDataBuff(); } //Copy frame to output buffer @@ -856,12 +857,7 @@ void MfVideoIn::ReadFramesInternal() this->dwStreamFlagsBuff.erase(this->dwStreamFlagsBuff.begin()); this->llTimestampBuff.erase(this->llTimestampBuff.begin()); - /*this->plStrideBuff.erase(this->plStrideBuff.begin()); - this->majorTypeBuff.erase(this->majorTypeBuff.begin()); - this->subTypeBuff.erase(this->subTypeBuff.begin()); - this->widthBuff.erase(this->widthBuff.begin()); - this->heightBuff.erase(this->heightBuff.begin()); - this->isCompressedBuff.erase(this->isCompressedBuff.begin());*/ + this->PopFrontMetaDataBuff(); } //Copy frame to output buffer diff --git a/mfvideoin.h b/mfvideoin.h index ce0e0c3..99491ed 100644 --- 
a/mfvideoin.h +++ b/mfvideoin.h @@ -64,6 +64,7 @@ class MfVideoIn : public WmfBase void OpenDeviceInternal(); void StartDeviceInternal(); void SetSampleMetaData(DWORD streamIndex); + void PopFrontMetaDataBuff(); void ReadFramesInternal(); void StopDeviceInternal(); void CloseDeviceInternal(); From 48f35320f3c51fafdc5766fd9a9ef53b5bb6cd8b Mon Sep 17 00:00:00 2001 From: TimSC Date: Tue, 19 Nov 2013 16:26:22 +0000 Subject: [PATCH 112/256] Forgot to lock the shared data --- mfvideoin.cpp | 16 +++++++++++----- 1 file changed, 11 insertions(+), 5 deletions(-) diff --git a/mfvideoin.cpp b/mfvideoin.cpp index fd39aaf..8786f49 100644 --- a/mfvideoin.cpp +++ b/mfvideoin.cpp @@ -536,8 +536,13 @@ int MfVideoIn::GetFrame(unsigned char **buffOut, class FrameMetaData *metaOut) if(metaOut==NULL) throw runtime_error("Meta data pointer cannot be null"); + EnterCriticalSection(&lock); + if(this->frameBuff.size() == 0) + { + LeaveCriticalSection(&lock); return 0; + } *buffOut = (unsigned char *)this->frameBuff[0]; @@ -558,6 +563,8 @@ int MfVideoIn::GetFrame(unsigned char **buffOut, class FrameMetaData *metaOut) this->PopFrontMetaDataBuff(); + LeaveCriticalSection(&lock); + return 1; } @@ -702,7 +709,7 @@ void MfVideoIn::StartDeviceInternal() void MfVideoIn::SetSampleMetaData(DWORD streamIndex) { //Set meta data in output object - /*IMFMediaType *pCurrentType = NULL; + IMFMediaType *pCurrentType = NULL; LONG plStride = 0; GUID majorType=GUID_NULL, subType=GUID_NULL; UINT32 width = 0; @@ -712,7 +719,6 @@ void MfVideoIn::SetSampleMetaData(DWORD streamIndex) if(!SUCCEEDED(hr)) cout << "Error 3\n"; BOOL isComp = FALSE; hr = pCurrentType->IsCompressedFormat(&isComp); - //PyDict_SetItemStringAndDeleteVar(out, "isCompressed", PyBool_FromLong(isComp)); hr = pCurrentType->GetGUID(MF_MT_MAJOR_TYPE, &majorType); LPCWSTR typePtr = GetGUIDNameConst(majorType); if(!SUCCEEDED(hr)) cout << "Error 4\n"; @@ -727,8 +733,8 @@ void MfVideoIn::SetSampleMetaData(DWORD streamIndex) } LPCWSTR subTypePtr = GetGUIDNameConst(subType); - */ - //this->plStrideBuff.push_back(plStride); + + this->plStrideBuff.push_back(plStride); //std::wstring tmp(L"test"); //this->majorTypeBuff.push_back(tmp); /*this->subTypeBuff.push_back(subTypePtr); @@ -740,7 +746,7 @@ void MfVideoIn::SetSampleMetaData(DWORD streamIndex) void MfVideoIn::PopFrontMetaDataBuff() { - //this->plStrideBuff.erase(this->plStrideBuff.begin()); + if(this->plStrideBuff.size()>0) this->plStrideBuff.erase(this->plStrideBuff.begin()); //if(this->majorTypeBuff.size()>0) this->majorTypeBuff.erase(this->majorTypeBuff.begin()); /*this->subTypeBuff.erase(this->subTypeBuff.begin()); this->widthBuff.erase(this->widthBuff.begin()); From 6af2bb6761a6fd6cbc22b4f39e1842d49ccaf9e4 Mon Sep 17 00:00:00 2001 From: TimSC Date: Tue, 19 Nov 2013 16:37:07 +0000 Subject: [PATCH 113/256] Set correct pixel format --- mfvideoin.cpp | 30 ++++++++++++++++++++---------- setup.py | 3 +-- 2 files changed, 21 insertions(+), 12 deletions(-) diff --git a/mfvideoin.cpp b/mfvideoin.cpp index 8786f49..b9659ef 100644 --- a/mfvideoin.cpp +++ b/mfvideoin.cpp @@ -546,9 +546,20 @@ int MfVideoIn::GetFrame(unsigned char **buffOut, class FrameMetaData *metaOut) *buffOut = (unsigned char *)this->frameBuff[0]; - metaOut->fmt = "RGB24"; - metaOut->width; - metaOut->height; + //wcout << this->majorTypeBuff[0] << "," << this->subTypeBuff[0] << endl; + + if(wcscmp(this->subTypeBuff[0].c_str(), L"MFVideoFormat_YUY2")==0) + { + metaOut->fmt = "YUY2"; + + //Do conversion to rgb + //TODO + } + else + metaOut->fmt = "Unknown"; + + 
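		//YUY2 background: it is a packed 4:2:2 format in which two horizontally
		//adjacent pixels share one U and one V sample, stored as Y0 U Y1 V
		//(2 bytes per pixel). A minimal sketch of one macropixel converted to
		//RGB24 with the usual BT.601 integer coefficients is shown below; the
		//TODO above is addressed in a later patch by handing the buffer to
		//DecodeFrame() from pixfmt.cpp, whose implementation may differ.
		//
		//  int c0 = y0 - 16, c1 = y1 - 16, d = u - 128, e = v - 128;
		//  #define CLIP(x) ((x) < 0 ? 0 : ((x) > 255 ? 255 : (x)))
		//  r0 = CLIP((298*c0 + 409*e + 128) >> 8);
		//  g0 = CLIP((298*c0 - 100*d - 208*e + 128) >> 8);
		//  b0 = CLIP((298*c0 + 516*d + 128) >> 8);
		//  //second pixel of the pair: same formulas with c1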
metaOut->width = this->widthBuff[0]; + metaOut->height = this->heightBuff[0]; metaOut->buffLen = this->frameLenBuff[0]; metaOut->sequence = 0; metaOut->tv_sec = (unsigned long)(this->llTimestampBuff[0] / 1e7); //in 100-nanosecond units @@ -735,23 +746,22 @@ void MfVideoIn::SetSampleMetaData(DWORD streamIndex) LPCWSTR subTypePtr = GetGUIDNameConst(subType); this->plStrideBuff.push_back(plStride); - //std::wstring tmp(L"test"); - //this->majorTypeBuff.push_back(tmp); - /*this->subTypeBuff.push_back(subTypePtr); + this->majorTypeBuff.push_back(typePtr); + this->subTypeBuff.push_back(subTypePtr); this->widthBuff.push_back(width); this->heightBuff.push_back(height); - this->isCompressedBuff.push_back(isComp);*/ + this->isCompressedBuff.push_back(isComp); } void MfVideoIn::PopFrontMetaDataBuff() { if(this->plStrideBuff.size()>0) this->plStrideBuff.erase(this->plStrideBuff.begin()); - //if(this->majorTypeBuff.size()>0) this->majorTypeBuff.erase(this->majorTypeBuff.begin()); - /*this->subTypeBuff.erase(this->subTypeBuff.begin()); + if(this->majorTypeBuff.size()>0) this->majorTypeBuff.erase(this->majorTypeBuff.begin()); + this->subTypeBuff.erase(this->subTypeBuff.begin()); this->widthBuff.erase(this->widthBuff.begin()); this->heightBuff.erase(this->heightBuff.begin()); - this->isCompressedBuff.erase(this->isCompressedBuff.begin());*/ + this->isCompressedBuff.erase(this->isCompressedBuff.begin()); } void MfVideoIn::ReadFramesInternal() diff --git a/setup.py b/setup.py index 18daa36..116269a 100755 --- a/setup.py +++ b/setup.py @@ -13,8 +13,7 @@ from distutils.core import Extension, setup import os -debug = 1 - +debug = 0 if os.name == "nt": if debug: From 164e90e9bafd18175bd4220ab8490e8b05851bfd Mon Sep 17 00:00:00 2001 From: TimSC Date: Tue, 19 Nov 2013 16:51:28 +0000 Subject: [PATCH 114/256] Attempt conversion but not working yet --- mfvideoin.cpp | 35 ++++++++++++++++++++++++++++------- 1 file changed, 28 insertions(+), 7 deletions(-) diff --git a/mfvideoin.cpp b/mfvideoin.cpp index b9659ef..fdcbc5c 100644 --- a/mfvideoin.cpp +++ b/mfvideoin.cpp @@ -11,6 +11,7 @@ using namespace std; #include #include "mfvideoin.h" +#include "pixfmt.h" #define MAX_DEVICE_ID_LEN 100 int EnumDevices(IMFActivate ***ppDevicesOut); @@ -544,23 +545,43 @@ int MfVideoIn::GetFrame(unsigned char **buffOut, class FrameMetaData *metaOut) return 0; } - *buffOut = (unsigned char *)this->frameBuff[0]; + unsigned char* currentBuff = (unsigned char *)this->frameBuff[0]; + std::string currentPixFmt = "Unknown"; + unsigned currentBuffLen = this->frameLenBuff[0]; //wcout << this->majorTypeBuff[0] << "," << this->subTypeBuff[0] << endl; if(wcscmp(this->subTypeBuff[0].c_str(), L"MFVideoFormat_YUY2")==0) + currentPixFmt = "YUY2"; + + //Do conversion to rgb + unsigned char *buffConv = NULL; + unsigned buffConvLen; + int ok = DecodeFrame(currentBuff, currentBuffLen, + currentPixFmt.c_str(), + this->widthBuff[0], this->heightBuff[0], + "RGB24", + &buffConv, + &buffConvLen); + + if(ok) { - metaOut->fmt = "YUY2"; - - //Do conversion to rgb - //TODO + delete [] currentBuff; //Now unneeded + currentBuff = buffConv; + currentPixFmt = "RGB24"; + currentBuffLen = buffConvLen; } else - metaOut->fmt = "Unknown"; + { + cout << "Cannot convert from pix format "; + wcout << this->subTypeBuff[0] << endl; + } + *buffOut = currentBuff; + metaOut->fmt = currentPixFmt; metaOut->width = this->widthBuff[0]; metaOut->height = this->heightBuff[0]; - metaOut->buffLen = this->frameLenBuff[0]; + metaOut->buffLen = currentBuffLen; metaOut->sequence = 0; 
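	//Media Foundation sample times (llTimestamp) are in 100-nanosecond units,
	//so one second is 1e7 ticks. The two lines below split the timestamp into
	//whole seconds and remaining microseconds. Worked example:
	//  llTimestamp = 123456789 ticks (12.3456789 s)
	//  tv_sec  = 123456789 / 1e7         = 12
	//  tv_usec = (123456789 - 12e7) / 10 = 345678 (truncated from 345678.9)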
metaOut->tv_sec = (unsigned long)(this->llTimestampBuff[0] / 1e7); //in 100-nanosecond units metaOut->tv_usec = (unsigned long)((this->llTimestampBuff[0] - metaOut->tv_sec * 1e7) / 10); From 91bdbbf464d7600afe13c428318ef10d5a25253a Mon Sep 17 00:00:00 2001 From: TimSC Date: Tue, 19 Nov 2013 16:55:26 +0000 Subject: [PATCH 115/256] Decode yuy2 as yuyv --- mfvideoin.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mfvideoin.cpp b/mfvideoin.cpp index fdcbc5c..9ed2bce 100644 --- a/mfvideoin.cpp +++ b/mfvideoin.cpp @@ -552,7 +552,7 @@ int MfVideoIn::GetFrame(unsigned char **buffOut, class FrameMetaData *metaOut) //wcout << this->majorTypeBuff[0] << "," << this->subTypeBuff[0] << endl; if(wcscmp(this->subTypeBuff[0].c_str(), L"MFVideoFormat_YUY2")==0) - currentPixFmt = "YUY2"; + currentPixFmt = "YUYV"; //YUYV = YUY2 //Do conversion to rgb unsigned char *buffConv = NULL; From 7efd1a023a2b8e65cd86c6348344a1158bc4034e Mon Sep 17 00:00:00 2001 From: TimSC Date: Tue, 19 Nov 2013 17:44:41 +0000 Subject: [PATCH 116/256] Change com init flags --- mfvideoin.cpp | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/mfvideoin.cpp b/mfvideoin.cpp index 9ed2bce..c92681b 100644 --- a/mfvideoin.cpp +++ b/mfvideoin.cpp @@ -431,8 +431,8 @@ WmfBase::WmfBase() : Base_Video_In() if(!SUCCEEDED(hr)) throw std::runtime_error("Media foundation startup failed"); - hr = CoInitializeEx(NULL, COINIT_MULTITHREADED); - if(!SUCCEEDED(hr)) + hr = CoInitializeEx(NULL, COINIT_APARTMENTTHREADED); + if(hr == RPC_E_CHANGED_MODE) throw std::runtime_error("CoInitializeEx failed"); } From fb1295018d950c758e9be1c192c6e0f99827093f Mon Sep 17 00:00:00 2001 From: Tim Sheerman-Chase Date: Wed, 20 Nov 2013 13:57:20 +0000 Subject: [PATCH 117/256] Save link to msdn --- mfvideoout.cpp | 2 ++ 1 file changed, 2 insertions(+) diff --git a/mfvideoout.cpp b/mfvideoout.cpp index aff4c27..9dc9050 100644 --- a/mfvideoout.cpp +++ b/mfvideoout.cpp @@ -1,6 +1,8 @@ #include "mfvideoout.h" +//http://msdn.microsoft.com/en-us/library/windows/desktop/ms700134%28v=vs.85%29.aspx + void *MfVideoOut_Worker_thread(void *arg) { class MfVideoOut *argobj = (class MfVideoOut*) arg; From 944ad327647a227fcd4b08de6c4de75680649375 Mon Sep 17 00:00:00 2001 From: TimSC Date: Wed, 20 Nov 2013 14:17:30 +0000 Subject: [PATCH 118/256] Move mf video output implementation to cpp file --- mfvideoout.cpp | 43 +++++++++++++++++++++++++++++++++++++++++++ mfvideoout.h | 17 ++++++++++------- 2 files changed, 53 insertions(+), 7 deletions(-) diff --git a/mfvideoout.cpp b/mfvideoout.cpp index 9dc9050..0e7d6ad 100644 --- a/mfvideoout.cpp +++ b/mfvideoout.cpp @@ -3,6 +3,49 @@ //http://msdn.microsoft.com/en-us/library/windows/desktop/ms700134%28v=vs.85%29.aspx + +MfVideoOut::MfVideoOut(const char *devName) : Base_Video_Out() +{ + +} + +MfVideoOut::~MfVideoOut() +{ + +} + +void MfVideoOut::SendFrame(const char *imgIn, unsigned imgLen, const char *pxFmt, int width, int height) +{ + +} + +void MfVideoOut::Stop() +{ + +} + +int MfVideoOut::WaitForStop() +{ + return 1; +} + +void MfVideoOut::SetOutputSize(int width, int height) +{ + +} + +void MfVideoOut::SetOutputPxFmt(const char *fmt) +{ + +} + +void MfVideoOut::Run() +{ + +} + +//******************************************************************************* + void *MfVideoOut_Worker_thread(void *arg) { class MfVideoOut *argobj = (class MfVideoOut*) arg; diff --git a/mfvideoout.h b/mfvideoout.h index d430eef..f406a65 100644 --- a/mfvideoout.h +++ b/mfvideoout.h @@ -9,15 +9,18 @@ class MfVideoOut : public 
Base_Video_Out { public: - MfVideoOut(const char *devName) : Base_Video_Out() {}; - virtual ~MfVideoOut() {}; + MfVideoOut(const char *devName); + virtual ~MfVideoOut(); - void SendFrame(const char *imgIn, unsigned imgLen, const char *pxFmt, int width, int height) {}; - void Stop() {}; - int WaitForStop() {return 1;}; + void SendFrame(const char *imgIn, unsigned imgLen, const char *pxFmt, int width, int height); + void Stop(); + int WaitForStop(); + + virtual void SetOutputSize(int width, int height); + virtual void SetOutputPxFmt(const char *fmt); + + void Run(); - virtual void SetOutputSize(int width, int height) {}; - virtual void SetOutputPxFmt(const char *fmt) {}; }; void *MfVideoOut_Worker_thread(void *arg); From 74b8b9956948543a94839d85ec6c4f3513db36b9 Mon Sep 17 00:00:00 2001 From: TimSC Date: Wed, 20 Nov 2013 14:26:11 +0000 Subject: [PATCH 119/256] Add project to test windows video out --- wintest/wintest.sln | 20 ++++++++ wintest/wintest/ReadMe.txt | 40 +++++++++++++++ wintest/wintest/wintest.cpp | 13 +++++ wintest/wintest/wintest.vcxproj | 89 +++++++++++++++++++++++++++++++++ 4 files changed, 162 insertions(+) create mode 100644 wintest/wintest.sln create mode 100644 wintest/wintest/ReadMe.txt create mode 100644 wintest/wintest/wintest.cpp create mode 100644 wintest/wintest/wintest.vcxproj diff --git a/wintest/wintest.sln b/wintest/wintest.sln new file mode 100644 index 0000000..d1b9159 --- /dev/null +++ b/wintest/wintest.sln @@ -0,0 +1,20 @@ + +Microsoft Visual Studio Solution File, Format Version 11.00 +# Visual Studio 2010 +Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "wintest", "wintest\wintest.vcxproj", "{6152A268-A7CF-4E82-8E0B-A2FDC4342AFD}" +EndProject +Global + GlobalSection(SolutionConfigurationPlatforms) = preSolution + Debug|Win32 = Debug|Win32 + Release|Win32 = Release|Win32 + EndGlobalSection + GlobalSection(ProjectConfigurationPlatforms) = postSolution + {6152A268-A7CF-4E82-8E0B-A2FDC4342AFD}.Debug|Win32.ActiveCfg = Debug|Win32 + {6152A268-A7CF-4E82-8E0B-A2FDC4342AFD}.Debug|Win32.Build.0 = Debug|Win32 + {6152A268-A7CF-4E82-8E0B-A2FDC4342AFD}.Release|Win32.ActiveCfg = Release|Win32 + {6152A268-A7CF-4E82-8E0B-A2FDC4342AFD}.Release|Win32.Build.0 = Release|Win32 + EndGlobalSection + GlobalSection(SolutionProperties) = preSolution + HideSolutionNode = FALSE + EndGlobalSection +EndGlobal diff --git a/wintest/wintest/ReadMe.txt b/wintest/wintest/ReadMe.txt new file mode 100644 index 0000000..e69e676 --- /dev/null +++ b/wintest/wintest/ReadMe.txt @@ -0,0 +1,40 @@ +======================================================================== + CONSOLE APPLICATION : wintest Project Overview +======================================================================== + +AppWizard has created this wintest application for you. + +This file contains a summary of what you will find in each of the files that +make up your wintest application. + + +wintest.vcxproj + This is the main project file for VC++ projects generated using an Application Wizard. + It contains information about the version of Visual C++ that generated the file, and + information about the platforms, configurations, and project features selected with the + Application Wizard. + +wintest.vcxproj.filters + This is the filters file for VC++ projects generated using an Application Wizard. + It contains information about the association between the files in your project + and the filters. This association is used in the IDE to show grouping of files with + similar extensions under a specific node (for e.g. 
".cpp" files are associated with the + "Source Files" filter). + +wintest.cpp + This is the main application source file. + +///////////////////////////////////////////////////////////////////////////// +Other standard files: + +StdAfx.h, StdAfx.cpp + These files are used to build a precompiled header (PCH) file + named wintest.pch and a precompiled types file named StdAfx.obj. + +///////////////////////////////////////////////////////////////////////////// +Other notes: + +AppWizard uses "TODO:" comments to indicate parts of the source code you +should add to or customize. + +///////////////////////////////////////////////////////////////////////////// diff --git a/wintest/wintest/wintest.cpp b/wintest/wintest/wintest.cpp new file mode 100644 index 0000000..aa3d083 --- /dev/null +++ b/wintest/wintest/wintest.cpp @@ -0,0 +1,13 @@ +// wintest.cpp : Defines the entry point for the console application. +// + +//#include "stdafx.h" +#include "../../mfvideoout.h" + +int main(int argc, char* argv[]) +{ + class MfVideoOut mfVideoOut("test"); + + return 0; +} + diff --git a/wintest/wintest/wintest.vcxproj b/wintest/wintest/wintest.vcxproj new file mode 100644 index 0000000..86f7578 --- /dev/null +++ b/wintest/wintest/wintest.vcxproj @@ -0,0 +1,89 @@ + + + + + Debug + Win32 + + + Release + Win32 + + + + {6152A268-A7CF-4E82-8E0B-A2FDC4342AFD} + Win32Proj + wintest + + + + Application + true + Unicode + + + Application + false + true + Unicode + + + + + + + + + + + + + true + + + false + + + + NotUsing + Level3 + Disabled + WIN32;_DEBUG;_CONSOLE;%(PreprocessorDefinitions) + + + Console + true + + + + + Level3 + NotUsing + MaxSpeed + true + true + WIN32;NDEBUG;_CONSOLE;%(PreprocessorDefinitions) + + + Console + true + true + true + + + + + + + + + + + + + + + + + + \ No newline at end of file From c6db39e78d94a802f33e329e6c41313515edd6f4 Mon Sep 17 00:00:00 2001 From: TimSC Date: Wed, 20 Nov 2013 14:29:28 +0000 Subject: [PATCH 120/256] Initialise com --- mfvideoout.cpp | 11 ++++++++++- wintest/wintest/wintest.vcxproj | 3 ++- 2 files changed, 12 insertions(+), 2 deletions(-) diff --git a/mfvideoout.cpp b/mfvideoout.cpp index 0e7d6ad..e9daf2e 100644 --- a/mfvideoout.cpp +++ b/mfvideoout.cpp @@ -1,17 +1,26 @@ #include "mfvideoout.h" +#include +#include //http://msdn.microsoft.com/en-us/library/windows/desktop/ms700134%28v=vs.85%29.aspx - MfVideoOut::MfVideoOut(const char *devName) : Base_Video_Out() { + HRESULT hr = MFStartup(MF_VERSION); + if(!SUCCEEDED(hr)) + throw std::runtime_error("Media foundation startup failed"); + hr = CoInitializeEx(NULL, COINIT_APARTMENTTHREADED); + if(hr == RPC_E_CHANGED_MODE) + throw std::runtime_error("CoInitializeEx failed"); } MfVideoOut::~MfVideoOut() { + MFShutdown(); + CoUninitialize(); } void MfVideoOut::SendFrame(const char *imgIn, unsigned imgLen, const char *pxFmt, int width, int height) diff --git a/wintest/wintest/wintest.vcxproj b/wintest/wintest/wintest.vcxproj index 86f7578..784042e 100644 --- a/wintest/wintest/wintest.vcxproj +++ b/wintest/wintest/wintest.vcxproj @@ -53,6 +53,7 @@ Console true + kernel32.lib;user32.lib;gdi32.lib;winspool.lib;comdlg32.lib;advapi32.lib;shell32.lib;ole32.lib;oleaut32.lib;uuid.lib;odbc32.lib;odbccp32.lib;%(AdditionalDependencies);Mfplat.lib;Mf.lib;Mfreadwrite.lib @@ -69,6 +70,7 @@ true true true + kernel32.lib;user32.lib;gdi32.lib;winspool.lib;comdlg32.lib;advapi32.lib;shell32.lib;ole32.lib;oleaut32.lib;uuid.lib;odbc32.lib;odbccp32.lib;%(AdditionalDependencies);Mfplat.lib;Mf.lib;Mfreadwrite.lib @@ -77,7 +79,6 @@ - From 
41279e6ab21d9b38ae0ca42a769fd482fb8b73d0 Mon Sep 17 00:00:00 2001 From: TimSC Date: Fri, 22 Nov 2013 09:46:53 +0000 Subject: [PATCH 121/256] Add msdn ball example for modification --- mfvideoout.cpp | 5 + winsource/ball.cpp | 200 +++++++++++ winsource/ball.h | 70 ++++ winsource/ball.rc | 88 +++++ winsource/ball.sln | 26 ++ winsource/ball.vcproj | 396 +++++++++++++++++++++ winsource/fball.cpp | 671 ++++++++++++++++++++++++++++++++++++ winsource/fball.h | 94 +++++ winsource/resource.h | 17 + wintest/wintest/wintest.cpp | 6 + 10 files changed, 1573 insertions(+) create mode 100644 winsource/ball.cpp create mode 100644 winsource/ball.h create mode 100644 winsource/ball.rc create mode 100644 winsource/ball.sln create mode 100644 winsource/ball.vcproj create mode 100644 winsource/fball.cpp create mode 100644 winsource/fball.h create mode 100644 winsource/resource.h diff --git a/mfvideoout.cpp b/mfvideoout.cpp index e9daf2e..b79c73a 100644 --- a/mfvideoout.cpp +++ b/mfvideoout.cpp @@ -14,6 +14,11 @@ MfVideoOut::MfVideoOut(const char *devName) : Base_Video_Out() hr = CoInitializeEx(NULL, COINIT_APARTMENTTHREADED); if(hr == RPC_E_CHANGED_MODE) throw std::runtime_error("CoInitializeEx failed"); + + + + + } MfVideoOut::~MfVideoOut() diff --git a/winsource/ball.cpp b/winsource/ball.cpp new file mode 100644 index 0000000..d46e723 --- /dev/null +++ b/winsource/ball.cpp @@ -0,0 +1,200 @@ +//------------------------------------------------------------------------------ +// File: Ball.cpp +// +// Desc: DirectShow sample code. This sample illustrates a simple source +// filter that produces decompressed images showing a ball bouncing +// around. Each movement of the ball is done by generating a new image. +// We use the CSource and CSourceStream base classes to manage a source +// filter - we are a live source and so do not support any seeking. +// +// The image stream is never-ending, with the ball color dependent on +// bit depth of the current display device. 32, 24, 16 (555 and 565), +// and 8 bit palettized types can be supplied. +// +// In implementation, the CSource and CSourceStream base classes from +// the SDK are used to implement some of the more tedious effort +// associated with source filters. In particular, the starting and +// stopping of worker threads based upon overall activation/stopping +// is facilitated. A worker thread sits in a loop asking for buffers +// and then calls the PURE virtual FillBuffer method when it has a +// buffer available to fill. +// +// The sample also has a simple quality management implementation in +// the filter. With the exception of renderers (which normally initiate +// it), this is controlled through IQualityControl. In each frame it +// is called for status. Due to the straightforward nature of the +// filter, spacing of samples sent downward can be controlled so that +// any CPU used runs flat out. +// +// Demonstration instructions: +// +// Start GraphEdit, which is available in the SDK DXUtils folder. Click +// on the Graph menu and select "Insert Filters." From the dialog box, +// double click on "DirectShow filters," then "Bouncing ball" and then +// dismiss the dialog. Go to the output pin of the filter box and +// right click, selecting "Render." A video renderer will be inserted +// and connected up (on some displays there may be a color space +// convertor put between them to get the pictures into a suitable +// format). Then click "run" on GraphEdit and see the ball bounce +// around the window... 
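// Note: as with the other DirectShow sample filters, the built DLL must be
// registered before GraphEdit will list it - running "regsvr32 Ball.dll"
// invokes DllRegisterServer in fball.cpp, which adds the filter's CLSID to
// the registry so that "Bouncing Ball" shows up under "DirectShow filters".
//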
+// +// Files: +// +// ball.cpp Looks after drawing a moving bouncing ball +// ball.h Class definition for the ball drawing object +// ball.rc Version and title information resources +// fball.cpp The real filter class implementation +// fball.h Class definition for the main filter object +// resource.h A couple of identifiers for our resources +// +// Base classes used: +// +// CSource Base class for a generic source filter +// CSourceStream A base class for a source filters stream +// +// +// Copyright (c) Microsoft Corporation. All rights reserved. +//------------------------------------------------------------------------------ + + +#include +#include "ball.h" + + +//------------------------------------------------------------------------------ +// Name: CBall::CBall(() +// Desc: Constructor for the ball class. The default arguments provide a +// reasonable image and ball size. +//------------------------------------------------------------------------------ +CBall::CBall(int iImageWidth, int iImageHeight, int iBallSize) : + m_iImageWidth(iImageWidth), + m_iImageHeight(iImageHeight), + m_iBallSize(iBallSize), + m_iAvailableWidth(iImageWidth - iBallSize), + m_iAvailableHeight(iImageHeight - iBallSize), + m_x(0), + m_y(0), + m_xDir(RIGHT), + m_yDir(UP) +{ + // Check we have some (arbitrary) space to bounce in. + ASSERT(iImageWidth > 2*iBallSize); + ASSERT(iImageHeight > 2*iBallSize); + + // Random position for showing off a video mixer + m_iRandX = rand(); + m_iRandY = rand(); + +} // (Constructor) + + +//------------------------------------------------------------------------------ +// Name: CBall::PlotBall() +// Desc: Positions the ball on the memory buffer. +// Assumes the image buffer is arranged as Row 1,Row 2,...,Row n +// in memory and that the data is contiguous. +//------------------------------------------------------------------------------ +void CBall::PlotBall(BYTE pFrame[], BYTE BallPixel[], int iPixelSize) +{ + ASSERT(m_x >= 0); + ASSERT(m_x <= m_iAvailableWidth); + ASSERT(m_y >= 0); + ASSERT(m_y <= m_iAvailableHeight); + ASSERT(pFrame != NULL); + ASSERT(BallPixel != NULL); + + // The current byte of interest in the frame + BYTE *pBack; + pBack = pFrame; + + // Plot the ball into the correct location + BYTE *pBall = pFrame + ( m_y * m_iImageWidth * iPixelSize) + m_x * iPixelSize; + + for(int row = 0; row < m_iBallSize; row++) + { + for(int col = 0; col < m_iBallSize; col++) + { + // For each byte fill its value from BallPixel[] + for(int i = 0; i < iPixelSize; i++) + { + if(WithinCircle(col, row)) + { + *pBall = BallPixel[i]; + } + pBall++; + } + } + pBall += m_iAvailableWidth * iPixelSize; + } + +} // PlotBall + + +//------------------------------------------------------------------------------ +// CBall::BallPosition() +// +// Returns the 1-dimensional position of the ball at time t millisecs +// (note that millisecs runs out after about a month!) 
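// The fold-back works like this: the unconstrained distance travelled is
// x = time/iPixelTime + iOffset, reduced modulo 2*iLength (one full
// out-and-back trip). Values in [0, iLength] lie on the outward leg and are
// used directly; values above iLength lie on the return leg and are
// reflected as 2*iLength - x. Example with iLength = 310 (a 320 pixel wide
// image minus the 10 pixel ball) and iPixelTime = 10 ms per pixel:
//   time = 7000 ms -> 700 % 620 =  80 <= 310 -> position  80
//   time = 4000 ms -> 400 % 620 = 400 >  310 -> position 620 - 400 = 220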
+//------------------------------------------------------------------------------ +int CBall::BallPosition(int iPixelTime, // Millisecs per pixel + int iLength, // Distance between the bounce points + int time, // Time in millisecs + int iOffset) // For a bit of randomness +{ + // Calculate the position of an unconstrained ball (no walls) + // then fold it back and forth to calculate the actual position + + int x = time / iPixelTime; + x += iOffset; + x %= 2 * iLength; + + // check it is still in bounds + if(x > iLength) + { + x = 2*iLength - x; + } + return x; + +} // BallPosition + + +//------------------------------------------------------------------------------ +// CBall::MoveBall() +// +// Set (m_x, m_y) to the new position of the ball. move diagonally +// with speed m_v in each of x and y directions. +// Guarantees to keep the ball in valid areas of the frame. +// When it hits an edge the ball bounces in the traditional manner!. +// The boundaries are (0..m_iAvailableWidth, 0..m_iAvailableHeight) +// +//------------------------------------------------------------------------------ +void CBall::MoveBall(CRefTime rt) +{ + m_x = BallPosition(10, m_iAvailableWidth, rt.Millisecs(), m_iRandX); + m_y = BallPosition(10, m_iAvailableHeight, rt.Millisecs(), m_iRandY); + +} // MoveBall + + +//------------------------------------------------------------------------------ +// CBall:WithinCircle() +// +// Return TRUE if (x,y) is within a circle radius S/2, center (S/2, S/2) +// where S is m_iBallSize else return FALSE +//------------------------------------------------------------------------------ +inline BOOL CBall::WithinCircle(int x, int y) +{ + unsigned int r = m_iBallSize / 2; + + if((x-r)*(x-r) + (y-r)*(y-r) < r*r) + { + return TRUE; + } + else + { + return FALSE; + } + +} // WithinCircle + + diff --git a/winsource/ball.h b/winsource/ball.h new file mode 100644 index 0000000..8285788 --- /dev/null +++ b/winsource/ball.h @@ -0,0 +1,70 @@ +//------------------------------------------------------------------------------ +// File: Ball.h +// +// Desc: DirectShow sample code - header file for the bouncing ball +// source filter. For more information, refer to Ball.cpp. +// +// Copyright (c) Microsoft Corporation. All rights reserved. +//------------------------------------------------------------------------------ + +//------------------------------------------------------------------------------ +// Define GUIDS used in this sample +//------------------------------------------------------------------------------ +// { fd501041-8ebe-11ce-8183-00aa00577da1 } +DEFINE_GUID(CLSID_BouncingBall, +0xfd501041, 0x8ebe, 0x11ce, 0x81, 0x83, 0x00, 0xaa, 0x00, 0x57, 0x7d, 0xa1); + + +//------------------------------------------------------------------------------ +// Class CBall +// +// This class encapsulates the behavior of the bounching ball over time +//------------------------------------------------------------------------------ +class CBall +{ +public: + + CBall(int iImageWidth = 320, int iImageHeight = 240, int iBallSize = 10); + + // Plots the square ball in the image buffer, at the current location. + // Use BallPixel[] as pixel value for the ball. + // Plots zero in all 'background' image locations. 
+ // iPixelSize - the number of bytes in a pixel (size of BallPixel[]) + void PlotBall(BYTE pFrame[], BYTE BallPixel[], int iPixelSize); + + // Moves the ball 1 pixel in each of the x and y directions + void MoveBall(CRefTime rt); + + int GetImageWidth() { return m_iImageWidth ;} + int GetImageHeight() { return m_iImageHeight ;} + +private: + + enum xdir { LEFT = -1, RIGHT = 1 }; + enum ydir { UP = 1, DOWN = -1 }; + + // The dimensions we can plot in, allowing for the width of the ball + int m_iAvailableHeight, m_iAvailableWidth; + + int m_iImageHeight; // The image height + int m_iImageWidth; // The image width + int m_iBallSize; // The diameter of the ball + int m_iRandX, m_iRandY; // For a bit of randomness + xdir m_xDir; // Direction the ball + ydir m_yDir; // Likewise vertically + + // The X position, in pixels, of the ball in the frame + // (0 < x < m_iAvailableWidth) + int m_x; + + // The Y position, in pixels, of the ball in the frame + // (0 < y < m_iAvailableHeight) + int m_y; + + // Return the one-dimensional position of the ball at time T milliseconds + int BallPosition(int iPixelTime, int iLength, int time, int iOffset); + + /// Tests a given pixel to see if it should be plotted + BOOL WithinCircle(int x, int y); + +}; // CBall diff --git a/winsource/ball.rc b/winsource/ball.rc new file mode 100644 index 0000000..ae46255 --- /dev/null +++ b/winsource/ball.rc @@ -0,0 +1,88 @@ +//Microsoft Developer Studio generated resource script. +// +// Version include +//#include + +#include "resource.h" + +#define APSTUDIO_READONLY_SYMBOLS +///////////////////////////////////////////////////////////////////////////// +// +// Generated from the TEXTINCLUDE 2 resource. +// +#include "windows.h" + +///////////////////////////////////////////////////////////////////////////// +#undef APSTUDIO_READONLY_SYMBOLS + +///////////////////////////////////////////////////////////////////////////// +// English (U.S.) resources + +#if !defined(AFX_RESOURCE_DLL) || defined(AFX_TARG_ENU) +#ifdef _WIN32 +LANGUAGE LANG_ENGLISH, SUBLANG_ENGLISH_US +#pragma code_page(1252) +#endif //_WIN32 + +#ifdef APSTUDIO_INVOKED +///////////////////////////////////////////////////////////////////////////// +// +// TEXTINCLUDE +// + +1 TEXTINCLUDE DISCARDABLE +BEGIN + "resource.h\0" +END + +2 TEXTINCLUDE DISCARDABLE +BEGIN + "#include ""windows.h""\r\n" + "\0" +END + +3 TEXTINCLUDE DISCARDABLE +BEGIN + "\r\n" + "\0" +END + +#endif // APSTUDIO_INVOKED + + +///////////////////////////////////////////////////////////////////////////// +// +// String Table +// + +STRINGTABLE DISCARDABLE +BEGIN + IDS_TITLE "Bouncing Ball" +END + +#endif // English (U.S.) resources +///////////////////////////////////////////////////////////////////////////// + +// +// Version Info +// +#define VERSION_RES_BIN_NAME "Ball.dll\0" +#define VERSION_RES_BIN_DESCRIPTION "Bouncing Ball Filter (Sample)\0" + +#define AMOVIE_SELF_REGISTER + +//#include + + + + +#ifndef APSTUDIO_INVOKED +///////////////////////////////////////////////////////////////////////////// +// +// Generated from the TEXTINCLUDE 3 resource. 
+// + + +///////////////////////////////////////////////////////////////////////////// +#endif // not APSTUDIO_INVOKED + diff --git a/winsource/ball.sln b/winsource/ball.sln new file mode 100644 index 0000000..11590b5 --- /dev/null +++ b/winsource/ball.sln @@ -0,0 +1,26 @@ + +Microsoft Visual Studio Solution File, Format Version 11.00 +# Visual Studio 2010 +Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "Ball", "Ball.vcxproj", "{9D3C9114-5067-45E6-B83D-12D31EF86297}" +EndProject +Global + GlobalSection(SolutionConfigurationPlatforms) = preSolution + Debug|Win32 = Debug|Win32 + Debug|x64 = Debug|x64 + Release|Win32 = Release|Win32 + Release|x64 = Release|x64 + EndGlobalSection + GlobalSection(ProjectConfigurationPlatforms) = postSolution + {9D3C9114-5067-45E6-B83D-12D31EF86297}.Debug|Win32.ActiveCfg = Debug|Win32 + {9D3C9114-5067-45E6-B83D-12D31EF86297}.Debug|Win32.Build.0 = Debug|Win32 + {9D3C9114-5067-45E6-B83D-12D31EF86297}.Debug|x64.ActiveCfg = Debug|x64 + {9D3C9114-5067-45E6-B83D-12D31EF86297}.Debug|x64.Build.0 = Debug|x64 + {9D3C9114-5067-45E6-B83D-12D31EF86297}.Release|Win32.ActiveCfg = Release|Win32 + {9D3C9114-5067-45E6-B83D-12D31EF86297}.Release|Win32.Build.0 = Release|Win32 + {9D3C9114-5067-45E6-B83D-12D31EF86297}.Release|x64.ActiveCfg = Release|x64 + {9D3C9114-5067-45E6-B83D-12D31EF86297}.Release|x64.Build.0 = Release|x64 + EndGlobalSection + GlobalSection(SolutionProperties) = preSolution + HideSolutionNode = FALSE + EndGlobalSection +EndGlobal diff --git a/winsource/ball.vcproj b/winsource/ball.vcproj new file mode 100644 index 0000000..4f9d389 --- /dev/null +++ b/winsource/ball.vcproj @@ -0,0 +1,396 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/winsource/fball.cpp b/winsource/fball.cpp new file mode 100644 index 0000000..c5f9387 --- /dev/null +++ b/winsource/fball.cpp @@ -0,0 +1,671 @@ +//------------------------------------------------------------------------------ +// File: FBall.cpp +// +// Desc: DirectShow sample code - implementation of filter behaviors +// for the bouncing ball source filter. For more information, +// refer to Ball.cpp. +// +// Copyright (c) Microsoft Corporation. All rights reserved. 
+//------------------------------------------------------------------------------ + +#include +#include +#include +#include "ball.h" +#include "fball.h" + +#pragma warning(disable:4710) // 'function': function not inlined (optimzation) + +// Setup data + +const AMOVIESETUP_MEDIATYPE sudOpPinTypes = +{ + &MEDIATYPE_Video, // Major type + &MEDIASUBTYPE_NULL // Minor type +}; + +const AMOVIESETUP_PIN sudOpPin = +{ + L"Output", // Pin string name + FALSE, // Is it rendered + TRUE, // Is it an output + FALSE, // Can we have none + FALSE, // Can we have many + &CLSID_NULL, // Connects to filter + NULL, // Connects to pin + 1, // Number of types + &sudOpPinTypes }; // Pin details + +const AMOVIESETUP_FILTER sudBallax = +{ + &CLSID_BouncingBall, // Filter CLSID + L"Bouncing Ball", // String name + MERIT_DO_NOT_USE, // Filter merit + 1, // Number pins + &sudOpPin // Pin details +}; + + +// COM global table of objects in this dll + +CFactoryTemplate g_Templates[] = { + { L"Bouncing Ball" + , &CLSID_BouncingBall + , CBouncingBall::CreateInstance + , NULL + , &sudBallax } +}; +int g_cTemplates = sizeof(g_Templates) / sizeof(g_Templates[0]); + + +//////////////////////////////////////////////////////////////////////// +// +// Exported entry points for registration and unregistration +// (in this case they only call through to default implementations). +// +//////////////////////////////////////////////////////////////////////// + +// +// DllRegisterServer +// +// Exported entry points for registration and unregistration +// +STDAPI DllRegisterServer() +{ + return AMovieDllRegisterServer2(TRUE); + +} // DllRegisterServer + + +// +// DllUnregisterServer +// +STDAPI DllUnregisterServer() +{ + return AMovieDllRegisterServer2(FALSE); + +} // DllUnregisterServer + + +// +// DllEntryPoint +// +extern "C" BOOL WINAPI DllEntryPoint(HINSTANCE, ULONG, LPVOID); + +BOOL APIENTRY DllMain(HANDLE hModule, + DWORD dwReason, + LPVOID lpReserved) +{ + return DllEntryPoint((HINSTANCE)(hModule), dwReason, lpReserved); +} + +// +// CreateInstance +// +// The only allowed way to create Bouncing balls! +// +CUnknown * WINAPI CBouncingBall::CreateInstance(LPUNKNOWN lpunk, HRESULT *phr) +{ + ASSERT(phr); + + CUnknown *punk = new CBouncingBall(lpunk, phr); + if(punk == NULL) + { + if(phr) + *phr = E_OUTOFMEMORY; + } + return punk; + +} // CreateInstance + + +// +// Constructor +// +// Initialise a CBallStream object so that we have a pin. 
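// (The CSource / CSourceStream base classes keep the pins created here in
// m_paStreams, expose them through the normal pin enumeration, and start and
// stop the worker thread that repeatedly calls FillBuffer() while the stream
// is active - which is why this filter class itself stays so small.)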
+// +CBouncingBall::CBouncingBall(LPUNKNOWN lpunk, HRESULT *phr) : + CSource(NAME("Bouncing ball"), lpunk, CLSID_BouncingBall) +{ + ASSERT(phr); + CAutoLock cAutoLock(&m_cStateLock); + + m_paStreams = (CSourceStream **) new CBallStream*[1]; + if(m_paStreams == NULL) + { + if(phr) + *phr = E_OUTOFMEMORY; + + return; + } + + m_paStreams[0] = new CBallStream(phr, this, L"A Bouncing Ball!"); + if(m_paStreams[0] == NULL) + { + if(phr) + *phr = E_OUTOFMEMORY; + + return; + } + +} // (Constructor) + + +// +// Constructor +// +CBallStream::CBallStream(HRESULT *phr, + CBouncingBall *pParent, + LPCWSTR pPinName) : + CSourceStream(NAME("Bouncing Ball"),phr, pParent, pPinName), + m_iImageWidth(320), + m_iImageHeight(240), + m_iDefaultRepeatTime(20) +{ + ASSERT(phr); + CAutoLock cAutoLock(&m_cSharedState); + + m_Ball = new CBall(m_iImageWidth, m_iImageHeight); + if(m_Ball == NULL) + { + if(phr) + *phr = E_OUTOFMEMORY; + } + +} // (Constructor) + + +// +// Destructor +// +CBallStream::~CBallStream() +{ + CAutoLock cAutoLock(&m_cSharedState); + if(m_Ball) + delete m_Ball; + +} // (Destructor) + + +// +// FillBuffer +// +// Plots a ball into the supplied video buffer +// +HRESULT CBallStream::FillBuffer(IMediaSample *pms) +{ + CheckPointer(pms,E_POINTER); + ASSERT(m_Ball); + + BYTE *pData; + long lDataLen; + + pms->GetPointer(&pData); + lDataLen = pms->GetSize(); + + ZeroMemory(pData, lDataLen); + { + CAutoLock cAutoLockShared(&m_cSharedState); + + // If we haven't just cleared the buffer delete the old + // ball and move the ball on + + m_Ball->MoveBall(m_rtSampleTime - (LONG) m_iRepeatTime); + m_Ball->PlotBall(pData, m_BallPixel, m_iPixelSize); + + // The current time is the sample's start + CRefTime rtStart = m_rtSampleTime; + + // Increment to find the finish time + m_rtSampleTime += (LONG)m_iRepeatTime; + + pms->SetTime((REFERENCE_TIME *) &rtStart,(REFERENCE_TIME *) &m_rtSampleTime); + } + + pms->SetSyncPoint(TRUE); + return NOERROR; + +} // FillBuffer + + +// +// Notify +// +// Alter the repeat rate according to quality management messages sent from +// the downstream filter (often the renderer). Wind it up or down according +// to the flooding level - also skip forward if we are notified of Late-ness +// +STDMETHODIMP CBallStream::Notify(IBaseFilter * pSender, Quality q) +{ + // Adjust the repeat rate. + if(q.Proportion<=0) + { + m_iRepeatTime = 1000; // We don't go slower than 1 per second + } + else + { + m_iRepeatTime = m_iRepeatTime*1000 / q.Proportion; + if(m_iRepeatTime>1000) + { + m_iRepeatTime = 1000; // We don't go slower than 1 per second + } + else if(m_iRepeatTime<10) + { + m_iRepeatTime = 10; // We don't go faster than 100/sec + } + } + + // skip forwards + if(q.Late > 0) + m_rtSampleTime += q.Late; + + return NOERROR; + +} // Notify + + +// +// GetMediaType +// +// I _prefer_ 5 formats - 8, 16 (*2), 24 or 32 bits per pixel and +// I will suggest these with an image size of 320x240. However +// I can accept any image size which gives me some space to bounce. 
+// +// A bit of fun: +// 8 bit displays get red balls +// 16 bit displays get blue +// 24 bit see green +// And 32 bit see yellow +// +// Prefered types should be ordered by quality, zero as highest quality +// Therefore iPosition = +// 0 return a 32bit mediatype +// 1 return a 24bit mediatype +// 2 return 16bit RGB565 +// 3 return a 16bit mediatype (rgb555) +// 4 return 8 bit palettised format +// (iPosition > 4 is invalid) +// +HRESULT CBallStream::GetMediaType(int iPosition, CMediaType *pmt) +{ + CheckPointer(pmt,E_POINTER); + + CAutoLock cAutoLock(m_pFilter->pStateLock()); + if(iPosition < 0) + { + return E_INVALIDARG; + } + + // Have we run off the end of types? + + if(iPosition > 4) + { + return VFW_S_NO_MORE_ITEMS; + } + + VIDEOINFO *pvi = (VIDEOINFO *) pmt->AllocFormatBuffer(sizeof(VIDEOINFO)); + if(NULL == pvi) + return(E_OUTOFMEMORY); + + ZeroMemory(pvi, sizeof(VIDEOINFO)); + + switch(iPosition) + { + case 0: + { + // Return our highest quality 32bit format + + // since we use RGB888 (the default for 32 bit), there is + // no reason to use BI_BITFIELDS to specify the RGB + // masks. Also, not everything supports BI_BITFIELDS + + SetPaletteEntries(Yellow); + pvi->bmiHeader.biCompression = BI_RGB; + pvi->bmiHeader.biBitCount = 32; + break; + } + + case 1: + { // Return our 24bit format + + SetPaletteEntries(Green); + pvi->bmiHeader.biCompression = BI_RGB; + pvi->bmiHeader.biBitCount = 24; + break; + } + + case 2: + { + // 16 bit per pixel RGB565 + + // Place the RGB masks as the first 3 doublewords in the palette area + for(int i = 0; i < 3; i++) + pvi->TrueColorInfo.dwBitMasks[i] = bits565[i]; + + SetPaletteEntries(Blue); + pvi->bmiHeader.biCompression = BI_BITFIELDS; + pvi->bmiHeader.biBitCount = 16; + break; + } + + case 3: + { // 16 bits per pixel RGB555 + + // Place the RGB masks as the first 3 doublewords in the palette area + for(int i = 0; i < 3; i++) + pvi->TrueColorInfo.dwBitMasks[i] = bits555[i]; + + SetPaletteEntries(Blue); + pvi->bmiHeader.biCompression = BI_BITFIELDS; + pvi->bmiHeader.biBitCount = 16; + break; + } + + case 4: + { // 8 bit palettised + + SetPaletteEntries(Red); + pvi->bmiHeader.biCompression = BI_RGB; + pvi->bmiHeader.biBitCount = 8; + pvi->bmiHeader.biClrUsed = iPALETTE_COLORS; + break; + } + } + + // (Adjust the parameters common to all formats...) + + // put the optimal palette in place + for(int i = 0; i < iPALETTE_COLORS; i++) + { + pvi->TrueColorInfo.bmiColors[i].rgbRed = m_Palette[i].peRed; + pvi->TrueColorInfo.bmiColors[i].rgbBlue = m_Palette[i].peBlue; + pvi->TrueColorInfo.bmiColors[i].rgbGreen = m_Palette[i].peGreen; + pvi->TrueColorInfo.bmiColors[i].rgbReserved = 0; + } + + pvi->bmiHeader.biSize = sizeof(BITMAPINFOHEADER); + pvi->bmiHeader.biWidth = m_iImageWidth; + pvi->bmiHeader.biHeight = m_iImageHeight; + pvi->bmiHeader.biPlanes = 1; + pvi->bmiHeader.biSizeImage = GetBitmapSize(&pvi->bmiHeader); + pvi->bmiHeader.biClrImportant = 0; + + SetRectEmpty(&(pvi->rcSource)); // we want the whole image area rendered. + SetRectEmpty(&(pvi->rcTarget)); // no particular destination rectangle + + pmt->SetType(&MEDIATYPE_Video); + pmt->SetFormatType(&FORMAT_VideoInfo); + pmt->SetTemporalCompression(FALSE); + + // Work out the GUID for the subtype from the header info. 
+ const GUID SubTypeGUID = GetBitmapSubtype(&pvi->bmiHeader); + pmt->SetSubtype(&SubTypeGUID); + pmt->SetSampleSize(pvi->bmiHeader.biSizeImage); + + return NOERROR; + +} // GetMediaType + + +// +// CheckMediaType +// +// We will accept 8, 16, 24 or 32 bit video formats, in any +// image size that gives room to bounce. +// Returns E_INVALIDARG if the mediatype is not acceptable +// +HRESULT CBallStream::CheckMediaType(const CMediaType *pMediaType) +{ + CheckPointer(pMediaType,E_POINTER); + + if((*(pMediaType->Type()) != MEDIATYPE_Video) || // we only output video + !(pMediaType->IsFixedSize())) // in fixed size samples + { + return E_INVALIDARG; + } + + // Check for the subtypes we support + const GUID *SubType = pMediaType->Subtype(); + if (SubType == NULL) + return E_INVALIDARG; + + if((*SubType != MEDIASUBTYPE_RGB8) + && (*SubType != MEDIASUBTYPE_RGB565) + && (*SubType != MEDIASUBTYPE_RGB555) + && (*SubType != MEDIASUBTYPE_RGB24) + && (*SubType != MEDIASUBTYPE_RGB32)) + { + return E_INVALIDARG; + } + + // Get the format area of the media type + VIDEOINFO *pvi = (VIDEOINFO *) pMediaType->Format(); + + if(pvi == NULL) + return E_INVALIDARG; + + // Check the image size. As my default ball is 10 pixels big + // look for at least a 20x20 image. This is an arbitary size constraint, + // but it avoids balls that are bigger than the picture... + + if((pvi->bmiHeader.biWidth < 20) || ( abs(pvi->bmiHeader.biHeight) < 20)) + { + return E_INVALIDARG; + } + + // Check if the image width & height have changed + if(pvi->bmiHeader.biWidth != m_Ball->GetImageWidth() || + abs(pvi->bmiHeader.biHeight) != m_Ball->GetImageHeight()) + { + // If the image width/height is changed, fail CheckMediaType() to force + // the renderer to resize the image. + return E_INVALIDARG; + } + + + return S_OK; // This format is acceptable. + +} // CheckMediaType + + +// +// DecideBufferSize +// +// This will always be called after the format has been sucessfully +// negotiated. So we have a look at m_mt to see what size image we agreed. +// Then we can ask for buffers of the correct size to contain them. 
+// +HRESULT CBallStream::DecideBufferSize(IMemAllocator *pAlloc, + ALLOCATOR_PROPERTIES *pProperties) +{ + CheckPointer(pAlloc,E_POINTER); + CheckPointer(pProperties,E_POINTER); + + CAutoLock cAutoLock(m_pFilter->pStateLock()); + HRESULT hr = NOERROR; + + VIDEOINFO *pvi = (VIDEOINFO *) m_mt.Format(); + pProperties->cBuffers = 1; + pProperties->cbBuffer = pvi->bmiHeader.biSizeImage; + + ASSERT(pProperties->cbBuffer); + + // Ask the allocator to reserve us some sample memory, NOTE the function + // can succeed (that is return NOERROR) but still not have allocated the + // memory that we requested, so we must check we got whatever we wanted + + ALLOCATOR_PROPERTIES Actual; + hr = pAlloc->SetProperties(pProperties,&Actual); + if(FAILED(hr)) + { + return hr; + } + + // Is this allocator unsuitable + + if(Actual.cbBuffer < pProperties->cbBuffer) + { + return E_FAIL; + } + + // Make sure that we have only 1 buffer (we erase the ball in the + // old buffer to save having to zero a 200k+ buffer every time + // we draw a frame) + + ASSERT(Actual.cBuffers == 1); + return NOERROR; + +} // DecideBufferSize + + +// +// SetMediaType +// +// Called when a media type is agreed between filters +// +HRESULT CBallStream::SetMediaType(const CMediaType *pMediaType) +{ + CAutoLock cAutoLock(m_pFilter->pStateLock()); + + // Pass the call up to my base class + + HRESULT hr = CSourceStream::SetMediaType(pMediaType); + + if(SUCCEEDED(hr)) + { + VIDEOINFO * pvi = (VIDEOINFO *) m_mt.Format(); + if (pvi == NULL) + return E_UNEXPECTED; + + switch(pvi->bmiHeader.biBitCount) + { + case 8: // Make a red pixel + + m_BallPixel[0] = 10; // 0 is palette index of red + m_iPixelSize = 1; + SetPaletteEntries(Red); + break; + + case 16: // Make a blue pixel + + m_BallPixel[0] = 0xf8; // 00000000 00011111 is blue in rgb555 or rgb565 + m_BallPixel[1] = 0x0; // don't forget the byte ordering within the mask word. + m_iPixelSize = 2; + SetPaletteEntries(Blue); + break; + + case 24: // Make a green pixel + + m_BallPixel[0] = 0x0; + m_BallPixel[1] = 0xff; + m_BallPixel[2] = 0x0; + m_iPixelSize = 3; + SetPaletteEntries(Green); + break; + + case 32: // Make a yellow pixel + + m_BallPixel[0] = 0x0; + m_BallPixel[1] = 0xff; + m_BallPixel[2] = 0xff; + m_BallPixel[3] = 0x00; + m_iPixelSize = 4; + SetPaletteEntries(Yellow); + break; + + default: + // We should never agree any other pixel sizes + ASSERT(FALSE); + break; + } + + CBall *pNewBall = new CBall(pvi->bmiHeader.biWidth, abs(pvi->bmiHeader.biHeight)); + + if(pNewBall) + { + delete m_Ball; + m_Ball = pNewBall; + } + else + hr = E_OUTOFMEMORY; + + return NOERROR; + } + + return hr; + +} // SetMediaType + + +// +// OnThreadCreate +// +// As we go active reset the stream time to zero +// +HRESULT CBallStream::OnThreadCreate() +{ + CAutoLock cAutoLockShared(&m_cSharedState); + m_rtSampleTime = 0; + + // we need to also reset the repeat time in case the system + // clock is turned off after m_iRepeatTime gets very big + m_iRepeatTime = m_iDefaultRepeatTime; + + return NOERROR; + +} // OnThreadCreate + + +// +// SetPaletteEntries +// +// If we set our palette to the current system palette + the colours we want +// the system has the least amount of work to do whilst plotting our images, +// if this stream is rendered to the current display. The first non reserved +// palette slot is at m_Palette[10], so put our first colour there. 
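DecideBufferSize() above asks the allocator for a single buffer of biSizeImage bytes for whatever format was agreed. For the default 320x240 formats that is a modest amount of memory; the arithmetic, with GetBitmapSize()'s DWORD row alignment written out, looks roughly like this (a sketch with illustrative values):

// Buffer-size arithmetic for a 320x240 RGB24 frame (illustrative values).
// GetBitmapSize() rounds each scan line up to a 4-byte boundary, which only
// matters for widths whose row length is not already a multiple of 4.
const int width = 320, height = 240, bitsPerPixel = 24;
const int stride    = ((width * bitsPerPixel / 8 + 3) / 4) * 4;  // 960 bytes, already aligned
const int imageSize = stride * height;                           // 230400 bytes per frame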
Also +// guarantees that black is always represented by zero in the frame buffer +// +HRESULT CBallStream::SetPaletteEntries(Colour color) +{ + CAutoLock cAutoLock(m_pFilter->pStateLock()); + + HDC hdc = GetDC(NULL); // hdc for the current display. + UINT res = GetSystemPaletteEntries(hdc, 0, iPALETTE_COLORS, (LPPALETTEENTRY) &m_Palette); + ReleaseDC(NULL, hdc); + + if(res == 0) + return E_FAIL; + + switch(color) + { + case Red: + m_Palette[10].peBlue = 0; + m_Palette[10].peGreen = 0; + m_Palette[10].peRed = 0xff; + break; + + case Yellow: + m_Palette[10].peBlue = 0; + m_Palette[10].peGreen = 0xff; + m_Palette[10].peRed = 0xff; + break; + + case Blue: + m_Palette[10].peBlue = 0xff; + m_Palette[10].peGreen = 0; + m_Palette[10].peRed = 0; + break; + + case Green: + m_Palette[10].peBlue = 0; + m_Palette[10].peGreen = 0xff; + m_Palette[10].peRed = 0; + break; + } + + m_Palette[10].peFlags = 0; + return NOERROR; + +} // SetPaletteEntries + + diff --git a/winsource/fball.h b/winsource/fball.h new file mode 100644 index 0000000..6b96f02 --- /dev/null +++ b/winsource/fball.h @@ -0,0 +1,94 @@ +//------------------------------------------------------------------------------ +// File: FBall.h +// +// Desc: DirectShow sample code - main header file for the bouncing ball +// source filter. For more information refer to Ball.cpp +// +// Copyright (c) Microsoft Corporation. All rights reserved. +//------------------------------------------------------------------------------ + +//------------------------------------------------------------------------------ +// Forward Declarations +//------------------------------------------------------------------------------ +// The class managing the output pin +class CBallStream; + + +//------------------------------------------------------------------------------ +// Class CBouncingBall +// +// This is the main class for the bouncing ball filter. It inherits from +// CSource, the DirectShow base class for source filters. +//------------------------------------------------------------------------------ +class CBouncingBall : public CSource +{ +public: + + // The only allowed way to create Bouncing balls! + static CUnknown * WINAPI CreateInstance(LPUNKNOWN lpunk, HRESULT *phr); + +private: + + // It is only allowed to to create these objects with CreateInstance + CBouncingBall(LPUNKNOWN lpunk, HRESULT *phr); + +}; // CBouncingBall + + +//------------------------------------------------------------------------------ +// Class CBallStream +// +// This class implements the stream which is used to output the bouncing ball +// data from the source filter. It inherits from DirectShows's base +// CSourceStream class. +//------------------------------------------------------------------------------ +class CBallStream : public CSourceStream +{ + +public: + + CBallStream(HRESULT *phr, CBouncingBall *pParent, LPCWSTR pPinName); + ~CBallStream(); + + // plots a ball into the supplied video frame + HRESULT FillBuffer(IMediaSample *pms); + + // Ask for buffers of the size appropriate to the agreed media type + HRESULT DecideBufferSize(IMemAllocator *pIMemAlloc, + ALLOCATOR_PROPERTIES *pProperties); + + // Set the agreed media type, and set up the necessary ball parameters + HRESULT SetMediaType(const CMediaType *pMediaType); + + // Because we calculate the ball there is no reason why we + // can't calculate it in any one of a set of formats... 
+ HRESULT CheckMediaType(const CMediaType *pMediaType); + HRESULT GetMediaType(int iPosition, CMediaType *pmt); + + // Resets the stream time to zero + HRESULT OnThreadCreate(void); + + // Quality control notifications sent to us + STDMETHODIMP Notify(IBaseFilter * pSender, Quality q); + +private: + + int m_iImageHeight; // The current image height + int m_iImageWidth; // And current image width + int m_iRepeatTime; // Time in msec between frames + const int m_iDefaultRepeatTime; // Initial m_iRepeatTime + + BYTE m_BallPixel[4]; // Represents one coloured ball + int m_iPixelSize; // The pixel size in bytes + PALETTEENTRY m_Palette[256]; // The optimal palette for the image + + CCritSec m_cSharedState; // Lock on m_rtSampleTime and m_Ball + CRefTime m_rtSampleTime; // The time stamp for each sample + CBall *m_Ball; // The current ball object + + // set up the palette appropriately + enum Colour {Red, Blue, Green, Yellow}; + HRESULT SetPaletteEntries(Colour colour); + +}; // CBallStream + diff --git a/winsource/resource.h b/winsource/resource.h new file mode 100644 index 0000000..952244e --- /dev/null +++ b/winsource/resource.h @@ -0,0 +1,17 @@ +//{{NO_DEPENDENCIES}} +// Microsoft Developer Studio generated include file. +// Used by ball.rc +// +#define IDS_TITLE 100 + +// Next default values for new objects +// +#ifdef APSTUDIO_INVOKED +#ifndef APSTUDIO_READONLY_SYMBOLS +#define _APS_NO_MFC 1 +#define _APS_NEXT_RESOURCE_VALUE 101 +#define _APS_NEXT_COMMAND_VALUE 40001 +#define _APS_NEXT_CONTROL_VALUE 1000 +#define _APS_NEXT_SYMED_VALUE 101 +#endif +#endif diff --git a/wintest/wintest/wintest.cpp b/wintest/wintest/wintest.cpp index aa3d083..c89054e 100644 --- a/wintest/wintest/wintest.cpp +++ b/wintest/wintest/wintest.cpp @@ -3,11 +3,17 @@ //#include "stdafx.h" #include "../../mfvideoout.h" +#include int main(int argc, char* argv[]) { class MfVideoOut mfVideoOut("test"); + while(1) + { + Sleep(100); + } + return 0; } From 04298a12561e16c2a93b96ff8f28d360ecc49895 Mon Sep 17 00:00:00 2001 From: TimSC Date: Fri, 22 Nov 2013 09:58:32 +0000 Subject: [PATCH 122/256] Attempting to compile and link --- winsource/Ball.vcxproj | 210 +++++++++++++++++++++++++++++++++++++++++ 1 file changed, 210 insertions(+) create mode 100644 winsource/Ball.vcxproj diff --git a/winsource/Ball.vcxproj b/winsource/Ball.vcxproj new file mode 100644 index 0000000..da1fe4b --- /dev/null +++ b/winsource/Ball.vcxproj @@ -0,0 +1,210 @@ + + + + + Debug + Win32 + + + Debug + x64 + + + Release + Win32 + + + Release + x64 + + + + {9D3C9114-5067-45E6-B83D-12D31EF86297} + Ball + Win32Proj + + + + DynamicLibrary + Unicode + + + DynamicLibrary + Unicode + + + DynamicLibrary + Unicode + + + DynamicLibrary + Unicode + + + + + + + + + + + + + + + + + + + <_ProjectFileVersion>10.0.40219.1 + Debug\ + Debug\ + true + Release\ + Release\ + false + $(Platform)\$(Configuration)\ + $(Platform)\$(Configuration)\ + true + $(Platform)\$(Configuration)\ + $(Platform)\$(Configuration)\ + false + AllRules.ruleset + + + AllRules.ruleset + + + AllRules.ruleset + + + AllRules.ruleset + + + C:\Program Files\Microsoft SDKs\Windows\v7.1\Samples\multimedia\directshow\baseclasses;$(VCInstallDir)include;$(VCInstallDir)atlmfc\include;$(WindowsSdkDir)include;$(FrameworkSDKDir)\include; + C:\Program Files\Microsoft SDKs\Windows\v7.1\Samples\multimedia\directshow\baseclasses;$(VCInstallDir)include;$(VCInstallDir)atlmfc\include;$(WindowsSdkDir)include;$(FrameworkSDKDir)\include; + C:\Program Files\Microsoft 
SDKs\Windows\v7.1\Samples\multimedia\directshow\baseclasses\Release;$(VCInstallDir)lib;$(VCInstallDir)atlmfc\lib;$(WindowsSdkDir)lib;$(FrameworkSDKDir)\lib + C:\Program Files\Microsoft SDKs\Windows\v7.1\Samples\multimedia\directshow\baseclasses\Debug;$(VCInstallDir)lib;$(VCInstallDir)atlmfc\lib;$(WindowsSdkDir)lib;$(FrameworkSDKDir)\lib + + + + Disabled + ..\..\BaseClasses\;%(AdditionalIncludeDirectories) + WIN32;_DEBUG;_WINDOWS;_USRDLL;BALL_EXPORTS;%(PreprocessorDefinitions) + true + EnableFastChecks + MultiThreadedDebugDLL + + + Level3 + EditAndContinue + StdCall + + + ..\..\BaseClasses\Debug\strmbasd.lib;winmm.lib;msvcrtd.lib;%(AdditionalDependencies) + %(AdditionalLibraryDirectories) + true + ball.def + true + Windows + MachineX86 + + + + + MaxSpeed + ..\..\BaseClasses\;$(DXSDK_DIR)\Include\;%(AdditionalIncludeDirectories) + WIN32;NDEBUG;_WINDOWS;_USRDLL;BALL_EXPORTS;%(PreprocessorDefinitions) + MultiThreadedDLL + + + Level3 + ProgramDatabase + StdCall + + + ..\..\BaseClasses\Release\strmbase.lib;winmm.lib;msvcrt.lib;%(AdditionalDependencies) + ..\..\BaseClasses\;%(AdditionalLibraryDirectories) + true + ball.def + true + Windows + true + true + MachineX86 + + + + + X64 + + + Disabled + ..\..\BaseClasses\;%(AdditionalIncludeDirectories) + WIN32;_DEBUG;_WINDOWS;_USRDLL;BALL_EXPORTS;%(PreprocessorDefinitions) + true + EnableFastChecks + MultiThreadedDebugDLL + + + Level3 + ProgramDatabase + StdCall + + + strmbasd.lib;winmm.lib;msvcrtd.lib;%(AdditionalDependencies) + ..\..\BaseClasses\x64\Debug\;%(AdditionalLibraryDirectories) + true + ball.def + true + Windows + MachineX64 + + + + + X64 + + + MaxSpeed + ..\..\BaseClasses\;$(DXSDK_DIR)\Include\;%(AdditionalIncludeDirectories) + WIN32;NDEBUG;_WINDOWS;_USRDLL;BALL_EXPORTS;%(PreprocessorDefinitions) + MultiThreadedDLL + + + Level3 + ProgramDatabase + StdCall + + + strmbase.lib;winmm.lib;msvcrt.lib;%(AdditionalDependencies) + ..\..\BaseClasses\x64\release;%(AdditionalLibraryDirectories) + true + ball.def + true + Windows + true + true + MachineX64 + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file From f8d4186c193bb55594e7dc1fdd3804fd3ff27cb3 Mon Sep 17 00:00:00 2001 From: TimSC Date: Fri, 22 Nov 2013 10:00:36 +0000 Subject: [PATCH 123/256] Now links --- winsource/Ball.vcxproj | 4 +- winsource/ball.vcproj | 396 ----------------------------------------- 2 files changed, 2 insertions(+), 398 deletions(-) delete mode 100644 winsource/ball.vcproj diff --git a/winsource/Ball.vcxproj b/winsource/Ball.vcxproj index da1fe4b..42de6a9 100644 --- a/winsource/Ball.vcxproj +++ b/winsource/Ball.vcxproj @@ -102,7 +102,7 @@ StdCall - ..\..\BaseClasses\Debug\strmbasd.lib;winmm.lib;msvcrtd.lib;%(AdditionalDependencies) + strmbasd.lib;winmm.lib;msvcrtd.lib;%(AdditionalDependencies) %(AdditionalLibraryDirectories) true ball.def @@ -124,7 +124,7 @@ StdCall - ..\..\BaseClasses\Release\strmbase.lib;winmm.lib;msvcrt.lib;%(AdditionalDependencies) + strmbase.lib;winmm.lib;msvcrt.lib;%(AdditionalDependencies) ..\..\BaseClasses\;%(AdditionalLibraryDirectories) true ball.def diff --git a/winsource/ball.vcproj b/winsource/ball.vcproj deleted file mode 100644 index 4f9d389..0000000 --- a/winsource/ball.vcproj +++ /dev/null @@ -1,396 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - From 7d7b9c88cf990b027276121dc0320c33b00dd4a7 Mon Sep 17 00:00:00 2001 From: TimSC 
Date: Fri, 22 Nov 2013 10:20:09 +0000 Subject: [PATCH 124/256] Video source is registered --- winsource/fball.cpp | 63 +++++++++++++++++++++++++++++++++++++++++++-- 1 file changed, 61 insertions(+), 2 deletions(-) diff --git a/winsource/fball.cpp b/winsource/fball.cpp index c5f9387..9dc36d8 100644 --- a/winsource/fball.cpp +++ b/winsource/fball.cpp @@ -18,6 +18,10 @@ // Setup data +#define CreateComObject(clsid, iid, var) CoCreateInstance( clsid, NULL, CLSCTX_INPROC_SERVER, iid, (void **)&var); +STDAPI AMovieSetupRegisterServer( CLSID clsServer, LPCWSTR szDescription, LPCWSTR szFileName, LPCWSTR szThreadingModel = L"Both", LPCWSTR szServerType = L"InprocServer32" ); +STDAPI AMovieSetupUnregisterServer( CLSID clsServer ); + const AMOVIESETUP_MEDIATYPE sudOpPinTypes = { &MEDIATYPE_Video, // Major type @@ -65,6 +69,61 @@ int g_cTemplates = sizeof(g_Templates) / sizeof(g_Templates[0]); // //////////////////////////////////////////////////////////////////////// +STDAPI RegisterFilters( BOOL bRegister ) +{ + HRESULT hr = NOERROR; + WCHAR achFileName[MAX_PATH]; + char achTemp[MAX_PATH]; + ASSERT(g_hInst != 0); + + if( 0 == GetModuleFileNameA(g_hInst, achTemp, sizeof(achTemp))) + return AmHresultFromWin32(GetLastError()); + + MultiByteToWideChar(CP_ACP, 0L, achTemp, lstrlenA(achTemp) + 1, + achFileName, NUMELMS(achFileName)); + + hr = CoInitialize(0); + if(bRegister) + { + hr = AMovieSetupRegisterServer(CLSID_BouncingBall, L"Bouncing Ball", achFileName, L"Both", L"InprocServer32"); + } + + if( SUCCEEDED(hr) ) + { + IFilterMapper2 *fm = 0; + hr = CreateComObject( CLSID_FilterMapper2, IID_IFilterMapper2, fm ); + if( SUCCEEDED(hr) ) + { + if(bRegister) + { + IMoniker *pMoniker = 0; + REGFILTER2 rf2; + rf2.dwVersion = 1; + rf2.dwMerit = MERIT_DO_NOT_USE; + rf2.cPins = 1; + rf2.rgPins = &sudOpPin; + hr = fm->RegisterFilter(CLSID_BouncingBall, L"Bouncing Ball", &pMoniker, &CLSID_VideoInputDeviceCategory, NULL, &rf2); + } + else + { + hr = fm->UnregisterFilter(&CLSID_VideoInputDeviceCategory, 0, CLSID_BouncingBall); + } + } + + // release interface + // + if(fm) + fm->Release(); + } + + if( SUCCEEDED(hr) && !bRegister ) + hr = AMovieSetupUnregisterServer( CLSID_BouncingBall ); + + CoFreeUnusedLibraries(); + CoUninitialize(); + return hr; +} + // // DllRegisterServer // @@ -72,7 +131,7 @@ int g_cTemplates = sizeof(g_Templates) / sizeof(g_Templates[0]); // STDAPI DllRegisterServer() { - return AMovieDllRegisterServer2(TRUE); + return RegisterFilters(TRUE); } // DllRegisterServer @@ -82,7 +141,7 @@ STDAPI DllRegisterServer() // STDAPI DllUnregisterServer() { - return AMovieDllRegisterServer2(FALSE); + return RegisterFilters(FALSE); } // DllUnregisterServer From 7d50d53ccf7954b580326f3cceeecf1d604e4dfc Mon Sep 17 00:00:00 2001 From: TimSC Date: Fri, 22 Nov 2013 10:57:05 +0000 Subject: [PATCH 125/256] Working on format negotiation --- winsource/fball.cpp | 304 ++++++++++++++++++++++++++++++++++++++------ winsource/fball.h | 21 ++- 2 files changed, 287 insertions(+), 38 deletions(-) diff --git a/winsource/fball.cpp b/winsource/fball.cpp index 9dc36d8..9710bea 100644 --- a/winsource/fball.cpp +++ b/winsource/fball.cpp @@ -216,10 +216,11 @@ CBouncingBall::CBouncingBall(LPUNKNOWN lpunk, HRESULT *phr) : CBallStream::CBallStream(HRESULT *phr, CBouncingBall *pParent, LPCWSTR pPinName) : - CSourceStream(NAME("Bouncing Ball"),phr, pParent, pPinName), - m_iImageWidth(320), - m_iImageHeight(240), - m_iDefaultRepeatTime(20) + CSourceStream(NAME("Bouncing Ball"),phr, pParent, pPinName), + m_pParent(pParent), + 
m_iDefaultRepeatTime(20), + m_iImageWidth(320), + m_iImageHeight(240) { ASSERT(phr); CAutoLock cAutoLock(&m_cSharedState); @@ -246,6 +247,21 @@ CBallStream::~CBallStream() } // (Destructor) +HRESULT CBallStream::QueryInterface(REFIID riid, void **ppv) +{ + // Standard OLE stuff + if(riid == _uuidof(IAMStreamConfig)) + *ppv = (IAMStreamConfig*)this; + else if(riid == _uuidof(IKsPropertySet)) + *ppv = (IKsPropertySet*)this; + else + return CSourceStream::QueryInterface(riid, ppv); + + AddRef(); + return S_OK; +} + + // // FillBuffer // @@ -253,35 +269,24 @@ CBallStream::~CBallStream() // HRESULT CBallStream::FillBuffer(IMediaSample *pms) { - CheckPointer(pms,E_POINTER); - ASSERT(m_Ball); + REFERENCE_TIME rtNow; + + REFERENCE_TIME avgFrameTime = ((VIDEOINFOHEADER*)m_mt.pbFormat)->AvgTimePerFrame; + + rtNow = m_rtLastTime; + m_rtLastTime += avgFrameTime; + pms->SetTime(&rtNow, &m_rtLastTime); + pms->SetSyncPoint(TRUE); BYTE *pData; long lDataLen; - pms->GetPointer(&pData); lDataLen = pms->GetSize(); + for(int i = 0; i < lDataLen; ++i) + { + pData[i] = rand(); + } - ZeroMemory(pData, lDataLen); - { - CAutoLock cAutoLockShared(&m_cSharedState); - - // If we haven't just cleared the buffer delete the old - // ball and move the ball on - - m_Ball->MoveBall(m_rtSampleTime - (LONG) m_iRepeatTime); - m_Ball->PlotBall(pData, m_BallPixel, m_iPixelSize); - - // The current time is the sample's start - CRefTime rtStart = m_rtSampleTime; - - // Increment to find the finish time - m_rtSampleTime += (LONG)m_iRepeatTime; - - pms->SetTime((REFERENCE_TIME *) &rtStart,(REFERENCE_TIME *) &m_rtSampleTime); - } - - pms->SetSyncPoint(TRUE); return NOERROR; } // FillBuffer @@ -345,7 +350,7 @@ STDMETHODIMP CBallStream::Notify(IBaseFilter * pSender, Quality q) // 4 return 8 bit palettised format // (iPosition > 4 is invalid) // -HRESULT CBallStream::GetMediaType(int iPosition, CMediaType *pmt) +/*HRESULT CBallStream::GetMediaType(int iPosition, CMediaType *pmt) { CheckPointer(pmt,E_POINTER); @@ -464,7 +469,7 @@ HRESULT CBallStream::GetMediaType(int iPosition, CMediaType *pmt) return NOERROR; } // GetMediaType - +*/ // // CheckMediaType @@ -473,7 +478,7 @@ HRESULT CBallStream::GetMediaType(int iPosition, CMediaType *pmt) // image size that gives room to bounce. // Returns E_INVALIDARG if the mediatype is not acceptable // -HRESULT CBallStream::CheckMediaType(const CMediaType *pMediaType) +/*HRESULT CBallStream::CheckMediaType(const CMediaType *pMediaType) { CheckPointer(pMediaType,E_POINTER); @@ -525,7 +530,7 @@ HRESULT CBallStream::CheckMediaType(const CMediaType *pMediaType) return S_OK; // This format is acceptable. } // CheckMediaType - +*/ // // DecideBufferSize @@ -534,7 +539,7 @@ HRESULT CBallStream::CheckMediaType(const CMediaType *pMediaType) // negotiated. So we have a look at m_mt to see what size image we agreed. // Then we can ask for buffers of the correct size to contain them. 
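The RegisterFilters() routine added in [PATCH 124/256] above registers the filter under CLSID_VideoInputDeviceCategory, which is what makes it appear as a capture source to applications such as Skype. A sketch of verifying that from a separate program (illustration only, not part of the patch series) by walking the video-input device category:

// Illustration only: list the registered video capture sources and look for
// the "Bouncing Ball" entry. Assumes the filter DLL has been registered.
#include <dshow.h>
#include <cstdio>
#pragma comment(lib, "strmiids.lib")
#pragma comment(lib, "ole32.lib")
#pragma comment(lib, "oleaut32.lib")

int main()
{
    CoInitialize(NULL);
    ICreateDevEnum *pDevEnum = NULL;
    IEnumMoniker *pEnum = NULL;
    if (SUCCEEDED(CoCreateInstance(CLSID_SystemDeviceEnum, NULL, CLSCTX_INPROC_SERVER,
                                   IID_ICreateDevEnum, (void **)&pDevEnum)) &&
        pDevEnum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory, &pEnum, 0) == S_OK)
    {
        IMoniker *pMoniker = NULL;
        while (pEnum->Next(1, &pMoniker, NULL) == S_OK)
        {
            IPropertyBag *pBag = NULL;
            if (SUCCEEDED(pMoniker->BindToStorage(0, 0, IID_IPropertyBag, (void **)&pBag)))
            {
                VARIANT var;
                VariantInit(&var);
                if (SUCCEEDED(pBag->Read(L"FriendlyName", &var, 0)))
                    wprintf(L"capture source: %ls\n", var.bstrVal);
                VariantClear(&var);
                pBag->Release();
            }
            pMoniker->Release();
        }
        pEnum->Release();
    }
    if (pDevEnum)
        pDevEnum->Release();
    CoUninitialize();
    return 0;
}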
// -HRESULT CBallStream::DecideBufferSize(IMemAllocator *pAlloc, +/*HRESULT CBallStream::DecideBufferSize(IMemAllocator *pAlloc, ALLOCATOR_PROPERTIES *pProperties) { CheckPointer(pAlloc,E_POINTER); @@ -575,7 +580,7 @@ HRESULT CBallStream::DecideBufferSize(IMemAllocator *pAlloc, return NOERROR; } // DecideBufferSize - +*/ // // SetMediaType @@ -586,10 +591,12 @@ HRESULT CBallStream::SetMediaType(const CMediaType *pMediaType) { CAutoLock cAutoLock(m_pFilter->pStateLock()); - // Pass the call up to my base class - + // Pass the call up to my base class + DECLARE_PTR(VIDEOINFOHEADER, pvi, pMediaType->Format()); HRESULT hr = CSourceStream::SetMediaType(pMediaType); + //HRESULT hr = CSourceStream::SetMediaType(pMediaType); + if(SUCCEEDED(hr)) { VIDEOINFO * pvi = (VIDEOINFO *) m_mt.Format(); @@ -661,7 +668,7 @@ HRESULT CBallStream::SetMediaType(const CMediaType *pMediaType) // // As we go active reset the stream time to zero // -HRESULT CBallStream::OnThreadCreate() +/*HRESULT CBallStream::OnThreadCreate() { CAutoLock cAutoLockShared(&m_cSharedState); m_rtSampleTime = 0; @@ -673,7 +680,7 @@ HRESULT CBallStream::OnThreadCreate() return NOERROR; } // OnThreadCreate - +*/ // // SetPaletteEntries @@ -728,3 +735,226 @@ HRESULT CBallStream::SetPaletteEntries(Colour color) } // SetPaletteEntries +////////////////////////////////////////////////////////////////////////// +// This is called when the output format has been negotiated +////////////////////////////////////////////////////////////////////////// + +// See Directshow help topic for IAMStreamConfig for details on this method +HRESULT CBallStream::GetMediaType(int iPosition, CMediaType *pmt) +{ + if(iPosition < 0) return E_INVALIDARG; + if(iPosition > 8) return VFW_S_NO_MORE_ITEMS; + + if(iPosition == 0) + { + *pmt = m_mt; + return S_OK; + } + + DECLARE_PTR(VIDEOINFOHEADER, pvi, pmt->AllocFormatBuffer(sizeof(VIDEOINFOHEADER))); + ZeroMemory(pvi, sizeof(VIDEOINFOHEADER)); + + pvi->bmiHeader.biCompression = BI_RGB; + pvi->bmiHeader.biBitCount = 24; + pvi->bmiHeader.biSize = sizeof(BITMAPINFOHEADER); + pvi->bmiHeader.biWidth = 80 * iPosition; + pvi->bmiHeader.biHeight = 60 * iPosition; + pvi->bmiHeader.biPlanes = 1; + pvi->bmiHeader.biSizeImage = GetBitmapSize(&pvi->bmiHeader); + pvi->bmiHeader.biClrImportant = 0; + + pvi->AvgTimePerFrame = 1000000; + + SetRectEmpty(&(pvi->rcSource)); // we want the whole image area rendered. + SetRectEmpty(&(pvi->rcTarget)); // no particular destination rectangle + + pmt->SetType(&MEDIATYPE_Video); + pmt->SetFormatType(&FORMAT_VideoInfo); + pmt->SetTemporalCompression(FALSE); + + // Work out the GUID for the subtype from the header info. 
+ const GUID SubTypeGUID = GetBitmapSubtype(&pvi->bmiHeader); + pmt->SetSubtype(&SubTypeGUID); + pmt->SetSampleSize(pvi->bmiHeader.biSizeImage); + + return NOERROR; + +} // GetMediaType + +// This method is called to see if a given output format is supported +HRESULT CBallStream::CheckMediaType(const CMediaType *pMediaType) +{ + VIDEOINFOHEADER *pvi = (VIDEOINFOHEADER *)(pMediaType->Format()); + if(*pMediaType != m_mt) + return E_INVALIDARG; + return S_OK; +} // CheckMediaType + +// This method is called after the pins are connected to allocate buffers to stream data +HRESULT CBallStream::DecideBufferSize(IMemAllocator *pAlloc, ALLOCATOR_PROPERTIES *pProperties) +{ + CAutoLock cAutoLock(m_pFilter->pStateLock()); + HRESULT hr = NOERROR; + + VIDEOINFOHEADER *pvi = (VIDEOINFOHEADER *) m_mt.Format(); + pProperties->cBuffers = 1; + pProperties->cbBuffer = pvi->bmiHeader.biSizeImage; + + ALLOCATOR_PROPERTIES Actual; + hr = pAlloc->SetProperties(pProperties,&Actual); + + if(FAILED(hr)) return hr; + if(Actual.cbBuffer < pProperties->cbBuffer) return E_FAIL; + + return NOERROR; +} // DecideBufferSize + +// Called when graph is run +HRESULT CBallStream::OnThreadCreate() +{ + m_iRepeatTime = m_iDefaultRepeatTime; + m_rtLastTime = 0; + return NOERROR; +} // OnThreadCreate + + +////////////////////////////////////////////////////////////////////////// +// IAMStreamConfig +////////////////////////////////////////////////////////////////////////// + +HRESULT STDMETHODCALLTYPE CBallStream::SetFormat(AM_MEDIA_TYPE *pmt) +{ + DECLARE_PTR(VIDEOINFOHEADER, pvi, m_mt.pbFormat); + m_mt = *pmt; + IPin* pin; + ConnectedTo(&pin); + if(pin) + { + IFilterGraph *pGraph = m_pParent->GetGraph(); + pGraph->Reconnect(this); + } + return S_OK; +} + +HRESULT STDMETHODCALLTYPE CBallStream::GetFormat(AM_MEDIA_TYPE **ppmt) +{ + *ppmt = CreateMediaType(&m_mt); + return S_OK; +} + +HRESULT STDMETHODCALLTYPE CBallStream::GetNumberOfCapabilities(int *piCount, int *piSize) +{ + *piCount = 8; + *piSize = sizeof(VIDEO_STREAM_CONFIG_CAPS); + return S_OK; +} + +HRESULT STDMETHODCALLTYPE CBallStream::GetStreamCaps(int iIndex, AM_MEDIA_TYPE **pmt, BYTE *pSCC) +{ + *pmt = CreateMediaType(&m_mt); + DECLARE_PTR(VIDEOINFOHEADER, pvi, (*pmt)->pbFormat); + + if (iIndex == 0) iIndex = 4; + + pvi->bmiHeader.biCompression = BI_RGB; + pvi->bmiHeader.biBitCount = 24; + pvi->bmiHeader.biSize = sizeof(BITMAPINFOHEADER); + pvi->bmiHeader.biWidth = 80 * iIndex; + pvi->bmiHeader.biHeight = 60 * iIndex; + pvi->bmiHeader.biPlanes = 1; + pvi->bmiHeader.biSizeImage = GetBitmapSize(&pvi->bmiHeader); + pvi->bmiHeader.biClrImportant = 0; + + SetRectEmpty(&(pvi->rcSource)); // we want the whole image area rendered. 
+ SetRectEmpty(&(pvi->rcTarget)); // no particular destination rectangle + + (*pmt)->majortype = MEDIATYPE_Video; + (*pmt)->subtype = MEDIASUBTYPE_RGB24; + (*pmt)->formattype = FORMAT_VideoInfo; + (*pmt)->bTemporalCompression = FALSE; + (*pmt)->bFixedSizeSamples= FALSE; + (*pmt)->lSampleSize = pvi->bmiHeader.biSizeImage; + (*pmt)->cbFormat = sizeof(VIDEOINFOHEADER); + + DECLARE_PTR(VIDEO_STREAM_CONFIG_CAPS, pvscc, pSCC); + + pvscc->guid = FORMAT_VideoInfo; + pvscc->VideoStandard = AnalogVideo_None; + pvscc->InputSize.cx = 640; + pvscc->InputSize.cy = 480; + pvscc->MinCroppingSize.cx = 80; + pvscc->MinCroppingSize.cy = 60; + pvscc->MaxCroppingSize.cx = 640; + pvscc->MaxCroppingSize.cy = 480; + pvscc->CropGranularityX = 80; + pvscc->CropGranularityY = 60; + pvscc->CropAlignX = 0; + pvscc->CropAlignY = 0; + + pvscc->MinOutputSize.cx = 80; + pvscc->MinOutputSize.cy = 60; + pvscc->MaxOutputSize.cx = 640; + pvscc->MaxOutputSize.cy = 480; + pvscc->OutputGranularityX = 0; + pvscc->OutputGranularityY = 0; + pvscc->StretchTapsX = 0; + pvscc->StretchTapsY = 0; + pvscc->ShrinkTapsX = 0; + pvscc->ShrinkTapsY = 0; + pvscc->MinFrameInterval = 200000; //50 fps + pvscc->MaxFrameInterval = 50000000; // 0.2 fps + pvscc->MinBitsPerSecond = (80 * 60 * 3 * 8) / 5; + pvscc->MaxBitsPerSecond = 640 * 480 * 3 * 8 * 50; + + return S_OK; +} + +////////////////////////////////////////////////////////////////////////// +// IKsPropertySet +////////////////////////////////////////////////////////////////////////// + + +HRESULT CBallStream::Set(REFGUID guidPropSet, DWORD dwID, void *pInstanceData, + DWORD cbInstanceData, void *pPropData, DWORD cbPropData) +{// Set: Cannot set any properties. + return E_NOTIMPL; +} + +// Get: Return the pin category (our only property). +HRESULT CBallStream::Get( + REFGUID guidPropSet, // Which property set. + DWORD dwPropID, // Which property in that set. + void *pInstanceData, // Instance data (ignore). + DWORD cbInstanceData, // Size of the instance data (ignore). + void *pPropData, // Buffer to receive the property data. + DWORD cbPropData, // Size of the buffer. + DWORD *pcbReturned // Return the size of the property. +) +{ + if (guidPropSet == AMPROPSETID_Pin) + { + if (dwPropID != AMPROPERTY_PIN_CATEGORY) return E_PROP_ID_UNSUPPORTED; + if (pPropData == NULL && pcbReturned == NULL) return E_POINTER; + + if (pcbReturned) *pcbReturned = sizeof(GUID); + if (pPropData == NULL) return S_OK; // Caller just wants to know the size. + if (cbPropData < sizeof(GUID)) return E_UNEXPECTED;// The buffer is too small. + + *(GUID *)pPropData = PIN_CATEGORY_CAPTURE; + return S_OK; + } + + + + return E_PROP_SET_UNSUPPORTED; +} + +// QuerySupported: Query whether the pin supports the specified property. +HRESULT CBallStream::QuerySupported(REFGUID guidPropSet, DWORD dwPropID, DWORD *pTypeSupport) +{ + if (guidPropSet != AMPROPSETID_Pin) return E_PROP_SET_UNSUPPORTED; + if (dwPropID != AMPROPERTY_PIN_CATEGORY) return E_PROP_ID_UNSUPPORTED; + // We support getting this property, but not setting it. + if (pTypeSupport) *pTypeSupport = KSPROPERTY_SUPPORT_GET; + return S_OK; +} diff --git a/winsource/fball.h b/winsource/fball.h index 6b96f02..764bc56 100644 --- a/winsource/fball.h +++ b/winsource/fball.h @@ -7,6 +7,8 @@ // Copyright (c) Microsoft Corporation. All rights reserved. 
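The capabilities advertised above run from 80x60 up to 640x480 in 80x60 steps, and all of the timing fields are REFERENCE_TIME values in 100 ns units. Converting the numbers used in the patch (a sketch for orientation only):

// REFERENCE_TIME is in 100 ns units, so 10,000,000 units = 1 second.
const __int64 UNITS_PER_SECOND = 10000000;
double maxFps     = (double)UNITS_PER_SECOND / 200000;    // MinFrameInterval -> 50 fps
double minFps     = (double)UNITS_PER_SECOND / 50000000;  // MaxFrameInterval -> 0.2 fps
double defaultFps = (double)UNITS_PER_SECOND / 1000000;   // AvgTimePerFrame set in GetMediaType -> 10 fps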
//------------------------------------------------------------------------------ +#define DECLARE_PTR(type, ptr, expr) type* ptr = (type*)(expr); + //------------------------------------------------------------------------------ // Forward Declarations //------------------------------------------------------------------------------ @@ -27,6 +29,7 @@ class CBouncingBall : public CSource // The only allowed way to create Bouncing balls! static CUnknown * WINAPI CreateInstance(LPUNKNOWN lpunk, HRESULT *phr); + IFilterGraph *GetGraph() {return m_pGraph;} private: // It is only allowed to to create these objects with CreateInstance @@ -42,7 +45,7 @@ class CBouncingBall : public CSource // data from the source filter. It inherits from DirectShows's base // CSourceStream class. //------------------------------------------------------------------------------ -class CBallStream : public CSourceStream +class CBallStream : public CSourceStream, public IAMStreamConfig, public IKsPropertySet { public: @@ -71,6 +74,19 @@ class CBallStream : public CSourceStream // Quality control notifications sent to us STDMETHODIMP Notify(IBaseFilter * pSender, Quality q); + STDMETHODIMP QueryInterface(REFIID riid, void **ppv); + STDMETHODIMP_(ULONG) AddRef() { return GetOwner()->AddRef(); } \ + STDMETHODIMP_(ULONG) Release() { return GetOwner()->Release(); } + + HRESULT STDMETHODCALLTYPE SetFormat(AM_MEDIA_TYPE *pmt); + HRESULT STDMETHODCALLTYPE GetFormat(AM_MEDIA_TYPE **ppmt); + HRESULT STDMETHODCALLTYPE GetNumberOfCapabilities(int *piCount, int *piSize); + HRESULT STDMETHODCALLTYPE GetStreamCaps(int iIndex, AM_MEDIA_TYPE **pmt, BYTE *pSCC); + + HRESULT STDMETHODCALLTYPE Set(REFGUID guidPropSet, DWORD dwID, void *pInstanceData, DWORD cbInstanceData, void *pPropData, DWORD cbPropData); + HRESULT STDMETHODCALLTYPE Get(REFGUID guidPropSet, DWORD dwPropID, void *pInstanceData,DWORD cbInstanceData, void *pPropData, DWORD cbPropData, DWORD *pcbReturned); + HRESULT STDMETHODCALLTYPE QuerySupported(REFGUID guidPropSet, DWORD dwPropID, DWORD *pTypeSupport); + private: int m_iImageHeight; // The current image height @@ -85,6 +101,9 @@ class CBallStream : public CSourceStream CCritSec m_cSharedState; // Lock on m_rtSampleTime and m_Ball CRefTime m_rtSampleTime; // The time stamp for each sample CBall *m_Ball; // The current ball object + CBouncingBall *m_pParent; + + REFERENCE_TIME m_rtLastTime; // set up the palette appropriately enum Colour {Red, Blue, Green, Yellow}; From 0943bcd3f5c0c1f2e4b97847326480cbfeba26fe Mon Sep 17 00:00:00 2001 From: TimSC Date: Fri, 22 Nov 2013 11:05:27 +0000 Subject: [PATCH 126/256] Still no luck --- winsource/fball.cpp | 406 +++----------------------------------------- winsource/fball.h | 21 +-- 2 files changed, 36 insertions(+), 391 deletions(-) diff --git a/winsource/fball.cpp b/winsource/fball.cpp index 9710bea..7114ae7 100644 --- a/winsource/fball.cpp +++ b/winsource/fball.cpp @@ -177,6 +177,14 @@ CUnknown * WINAPI CBouncingBall::CreateInstance(LPUNKNOWN lpunk, HRESULT *phr) } // CreateInstance +HRESULT CBouncingBall::QueryInterface(REFIID riid, void **ppv) +{ + //Forward request for IAMStreamConfig & IKsPropertySet to the pin + if(riid == _uuidof(IAMStreamConfig) || riid == _uuidof(IKsPropertySet)) + return m_paStreams[0]->QueryInterface(riid, ppv); + else + return CSource::QueryInterface(riid, ppv); +} // // Constructor @@ -189,7 +197,7 @@ CBouncingBall::CBouncingBall(LPUNKNOWN lpunk, HRESULT *phr) : ASSERT(phr); CAutoLock cAutoLock(&m_cStateLock); - m_paStreams = (CSourceStream **) new 
CBallStream*[1]; + /*m_paStreams = (CSourceStream **) new CBallStream*[1]; if(m_paStreams == NULL) { if(phr) @@ -205,7 +213,7 @@ CBouncingBall::CBouncingBall(LPUNKNOWN lpunk, HRESULT *phr) : *phr = E_OUTOFMEMORY; return; - } + }*/ } // (Constructor) @@ -217,20 +225,20 @@ CBallStream::CBallStream(HRESULT *phr, CBouncingBall *pParent, LPCWSTR pPinName) : CSourceStream(NAME("Bouncing Ball"),phr, pParent, pPinName), - m_pParent(pParent), - m_iDefaultRepeatTime(20), - m_iImageWidth(320), - m_iImageHeight(240) + m_pParent(pParent) { - ASSERT(phr); - CAutoLock cAutoLock(&m_cSharedState); +// ASSERT(phr); + // CAutoLock cAutoLock(&m_cSharedState); + + // Set the default media type as 320x240x24@15 + GetMediaType(4, &m_mt); - m_Ball = new CBall(m_iImageWidth, m_iImageHeight); + /*m_Ball = new CBall(m_iImageWidth, m_iImageHeight); if(m_Ball == NULL) { if(phr) *phr = E_OUTOFMEMORY; - } + }*/ } // (Constructor) @@ -240,9 +248,9 @@ CBallStream::CBallStream(HRESULT *phr, // CBallStream::~CBallStream() { - CAutoLock cAutoLock(&m_cSharedState); +/* CAutoLock cAutoLock(&m_cSharedState); if(m_Ball) - delete m_Ball; + delete m_Ball;*/ } // (Destructor) @@ -301,287 +309,11 @@ HRESULT CBallStream::FillBuffer(IMediaSample *pms) // STDMETHODIMP CBallStream::Notify(IBaseFilter * pSender, Quality q) { - // Adjust the repeat rate. - if(q.Proportion<=0) - { - m_iRepeatTime = 1000; // We don't go slower than 1 per second - } - else - { - m_iRepeatTime = m_iRepeatTime*1000 / q.Proportion; - if(m_iRepeatTime>1000) - { - m_iRepeatTime = 1000; // We don't go slower than 1 per second - } - else if(m_iRepeatTime<10) - { - m_iRepeatTime = 10; // We don't go faster than 100/sec - } - } - - // skip forwards - if(q.Late > 0) - m_rtSampleTime += q.Late; - - return NOERROR; + return E_NOTIMPL; } // Notify -// -// GetMediaType -// -// I _prefer_ 5 formats - 8, 16 (*2), 24 or 32 bits per pixel and -// I will suggest these with an image size of 320x240. However -// I can accept any image size which gives me some space to bounce. -// -// A bit of fun: -// 8 bit displays get red balls -// 16 bit displays get blue -// 24 bit see green -// And 32 bit see yellow -// -// Prefered types should be ordered by quality, zero as highest quality -// Therefore iPosition = -// 0 return a 32bit mediatype -// 1 return a 24bit mediatype -// 2 return 16bit RGB565 -// 3 return a 16bit mediatype (rgb555) -// 4 return 8 bit palettised format -// (iPosition > 4 is invalid) -// -/*HRESULT CBallStream::GetMediaType(int iPosition, CMediaType *pmt) -{ - CheckPointer(pmt,E_POINTER); - - CAutoLock cAutoLock(m_pFilter->pStateLock()); - if(iPosition < 0) - { - return E_INVALIDARG; - } - - // Have we run off the end of types? - - if(iPosition > 4) - { - return VFW_S_NO_MORE_ITEMS; - } - - VIDEOINFO *pvi = (VIDEOINFO *) pmt->AllocFormatBuffer(sizeof(VIDEOINFO)); - if(NULL == pvi) - return(E_OUTOFMEMORY); - - ZeroMemory(pvi, sizeof(VIDEOINFO)); - - switch(iPosition) - { - case 0: - { - // Return our highest quality 32bit format - - // since we use RGB888 (the default for 32 bit), there is - // no reason to use BI_BITFIELDS to specify the RGB - // masks. 
Also, not everything supports BI_BITFIELDS - - SetPaletteEntries(Yellow); - pvi->bmiHeader.biCompression = BI_RGB; - pvi->bmiHeader.biBitCount = 32; - break; - } - - case 1: - { // Return our 24bit format - - SetPaletteEntries(Green); - pvi->bmiHeader.biCompression = BI_RGB; - pvi->bmiHeader.biBitCount = 24; - break; - } - - case 2: - { - // 16 bit per pixel RGB565 - - // Place the RGB masks as the first 3 doublewords in the palette area - for(int i = 0; i < 3; i++) - pvi->TrueColorInfo.dwBitMasks[i] = bits565[i]; - - SetPaletteEntries(Blue); - pvi->bmiHeader.biCompression = BI_BITFIELDS; - pvi->bmiHeader.biBitCount = 16; - break; - } - - case 3: - { // 16 bits per pixel RGB555 - - // Place the RGB masks as the first 3 doublewords in the palette area - for(int i = 0; i < 3; i++) - pvi->TrueColorInfo.dwBitMasks[i] = bits555[i]; - - SetPaletteEntries(Blue); - pvi->bmiHeader.biCompression = BI_BITFIELDS; - pvi->bmiHeader.biBitCount = 16; - break; - } - - case 4: - { // 8 bit palettised - - SetPaletteEntries(Red); - pvi->bmiHeader.biCompression = BI_RGB; - pvi->bmiHeader.biBitCount = 8; - pvi->bmiHeader.biClrUsed = iPALETTE_COLORS; - break; - } - } - - // (Adjust the parameters common to all formats...) - - // put the optimal palette in place - for(int i = 0; i < iPALETTE_COLORS; i++) - { - pvi->TrueColorInfo.bmiColors[i].rgbRed = m_Palette[i].peRed; - pvi->TrueColorInfo.bmiColors[i].rgbBlue = m_Palette[i].peBlue; - pvi->TrueColorInfo.bmiColors[i].rgbGreen = m_Palette[i].peGreen; - pvi->TrueColorInfo.bmiColors[i].rgbReserved = 0; - } - - pvi->bmiHeader.biSize = sizeof(BITMAPINFOHEADER); - pvi->bmiHeader.biWidth = m_iImageWidth; - pvi->bmiHeader.biHeight = m_iImageHeight; - pvi->bmiHeader.biPlanes = 1; - pvi->bmiHeader.biSizeImage = GetBitmapSize(&pvi->bmiHeader); - pvi->bmiHeader.biClrImportant = 0; - - SetRectEmpty(&(pvi->rcSource)); // we want the whole image area rendered. - SetRectEmpty(&(pvi->rcTarget)); // no particular destination rectangle - - pmt->SetType(&MEDIATYPE_Video); - pmt->SetFormatType(&FORMAT_VideoInfo); - pmt->SetTemporalCompression(FALSE); - - // Work out the GUID for the subtype from the header info. - const GUID SubTypeGUID = GetBitmapSubtype(&pvi->bmiHeader); - pmt->SetSubtype(&SubTypeGUID); - pmt->SetSampleSize(pvi->bmiHeader.biSizeImage); - - return NOERROR; - -} // GetMediaType -*/ - -// -// CheckMediaType -// -// We will accept 8, 16, 24 or 32 bit video formats, in any -// image size that gives room to bounce. -// Returns E_INVALIDARG if the mediatype is not acceptable -// -/*HRESULT CBallStream::CheckMediaType(const CMediaType *pMediaType) -{ - CheckPointer(pMediaType,E_POINTER); - - if((*(pMediaType->Type()) != MEDIATYPE_Video) || // we only output video - !(pMediaType->IsFixedSize())) // in fixed size samples - { - return E_INVALIDARG; - } - - // Check for the subtypes we support - const GUID *SubType = pMediaType->Subtype(); - if (SubType == NULL) - return E_INVALIDARG; - - if((*SubType != MEDIASUBTYPE_RGB8) - && (*SubType != MEDIASUBTYPE_RGB565) - && (*SubType != MEDIASUBTYPE_RGB555) - && (*SubType != MEDIASUBTYPE_RGB24) - && (*SubType != MEDIASUBTYPE_RGB32)) - { - return E_INVALIDARG; - } - - // Get the format area of the media type - VIDEOINFO *pvi = (VIDEOINFO *) pMediaType->Format(); - - if(pvi == NULL) - return E_INVALIDARG; - - // Check the image size. As my default ball is 10 pixels big - // look for at least a 20x20 image. This is an arbitary size constraint, - // but it avoids balls that are bigger than the picture... 
- - if((pvi->bmiHeader.biWidth < 20) || ( abs(pvi->bmiHeader.biHeight) < 20)) - { - return E_INVALIDARG; - } - - // Check if the image width & height have changed - if(pvi->bmiHeader.biWidth != m_Ball->GetImageWidth() || - abs(pvi->bmiHeader.biHeight) != m_Ball->GetImageHeight()) - { - // If the image width/height is changed, fail CheckMediaType() to force - // the renderer to resize the image. - return E_INVALIDARG; - } - - - return S_OK; // This format is acceptable. - -} // CheckMediaType -*/ - -// -// DecideBufferSize -// -// This will always be called after the format has been sucessfully -// negotiated. So we have a look at m_mt to see what size image we agreed. -// Then we can ask for buffers of the correct size to contain them. -// -/*HRESULT CBallStream::DecideBufferSize(IMemAllocator *pAlloc, - ALLOCATOR_PROPERTIES *pProperties) -{ - CheckPointer(pAlloc,E_POINTER); - CheckPointer(pProperties,E_POINTER); - - CAutoLock cAutoLock(m_pFilter->pStateLock()); - HRESULT hr = NOERROR; - - VIDEOINFO *pvi = (VIDEOINFO *) m_mt.Format(); - pProperties->cBuffers = 1; - pProperties->cbBuffer = pvi->bmiHeader.biSizeImage; - - ASSERT(pProperties->cbBuffer); - - // Ask the allocator to reserve us some sample memory, NOTE the function - // can succeed (that is return NOERROR) but still not have allocated the - // memory that we requested, so we must check we got whatever we wanted - - ALLOCATOR_PROPERTIES Actual; - hr = pAlloc->SetProperties(pProperties,&Actual); - if(FAILED(hr)) - { - return hr; - } - - // Is this allocator unsuitable - - if(Actual.cbBuffer < pProperties->cbBuffer) - { - return E_FAIL; - } - - // Make sure that we have only 1 buffer (we erase the ball in the - // old buffer to save having to zero a 200k+ buffer every time - // we draw a frame) - - ASSERT(Actual.cBuffers == 1); - return NOERROR; - -} // DecideBufferSize -*/ - // // SetMediaType // @@ -589,99 +321,11 @@ STDMETHODIMP CBallStream::Notify(IBaseFilter * pSender, Quality q) // HRESULT CBallStream::SetMediaType(const CMediaType *pMediaType) { - CAutoLock cAutoLock(m_pFilter->pStateLock()); - - // Pass the call up to my base class - DECLARE_PTR(VIDEOINFOHEADER, pvi, pMediaType->Format()); + DECLARE_PTR(VIDEOINFOHEADER, pvi, pMediaType->Format()); HRESULT hr = CSourceStream::SetMediaType(pMediaType); - - //HRESULT hr = CSourceStream::SetMediaType(pMediaType); - - if(SUCCEEDED(hr)) - { - VIDEOINFO * pvi = (VIDEOINFO *) m_mt.Format(); - if (pvi == NULL) - return E_UNEXPECTED; - - switch(pvi->bmiHeader.biBitCount) - { - case 8: // Make a red pixel - - m_BallPixel[0] = 10; // 0 is palette index of red - m_iPixelSize = 1; - SetPaletteEntries(Red); - break; - - case 16: // Make a blue pixel - - m_BallPixel[0] = 0xf8; // 00000000 00011111 is blue in rgb555 or rgb565 - m_BallPixel[1] = 0x0; // don't forget the byte ordering within the mask word. 
- m_iPixelSize = 2; - SetPaletteEntries(Blue); - break; - - case 24: // Make a green pixel - - m_BallPixel[0] = 0x0; - m_BallPixel[1] = 0xff; - m_BallPixel[2] = 0x0; - m_iPixelSize = 3; - SetPaletteEntries(Green); - break; - - case 32: // Make a yellow pixel - - m_BallPixel[0] = 0x0; - m_BallPixel[1] = 0xff; - m_BallPixel[2] = 0xff; - m_BallPixel[3] = 0x00; - m_iPixelSize = 4; - SetPaletteEntries(Yellow); - break; - - default: - // We should never agree any other pixel sizes - ASSERT(FALSE); - break; - } - - CBall *pNewBall = new CBall(pvi->bmiHeader.biWidth, abs(pvi->bmiHeader.biHeight)); - - if(pNewBall) - { - delete m_Ball; - m_Ball = pNewBall; - } - else - hr = E_OUTOFMEMORY; - - return NOERROR; - } - return hr; - } // SetMediaType - -// -// OnThreadCreate -// -// As we go active reset the stream time to zero -// -/*HRESULT CBallStream::OnThreadCreate() -{ - CAutoLock cAutoLockShared(&m_cSharedState); - m_rtSampleTime = 0; - - // we need to also reset the repeat time in case the system - // clock is turned off after m_iRepeatTime gets very big - m_iRepeatTime = m_iDefaultRepeatTime; - - return NOERROR; - -} // OnThreadCreate -*/ - // // SetPaletteEntries // @@ -691,7 +335,7 @@ HRESULT CBallStream::SetMediaType(const CMediaType *pMediaType) // palette slot is at m_Palette[10], so put our first colour there. Also // guarantees that black is always represented by zero in the frame buffer // -HRESULT CBallStream::SetPaletteEntries(Colour color) +/*HRESULT CBallStream::SetPaletteEntries(Colour color) { CAutoLock cAutoLock(m_pFilter->pStateLock()); @@ -733,7 +377,7 @@ HRESULT CBallStream::SetPaletteEntries(Colour color) return NOERROR; } // SetPaletteEntries - +*/ ////////////////////////////////////////////////////////////////////////// // This is called when the output format has been negotiated @@ -812,7 +456,7 @@ HRESULT CBallStream::DecideBufferSize(IMemAllocator *pAlloc, ALLOCATOR_PROPERTIE // Called when graph is run HRESULT CBallStream::OnThreadCreate() { - m_iRepeatTime = m_iDefaultRepeatTime; + //m_iRepeatTime = m_iDefaultRepeatTime; m_rtLastTime = 0; return NOERROR; } // OnThreadCreate diff --git a/winsource/fball.h b/winsource/fball.h index 764bc56..d6e9bae 100644 --- a/winsource/fball.h +++ b/winsource/fball.h @@ -29,6 +29,7 @@ class CBouncingBall : public CSource // The only allowed way to create Bouncing balls! 
static CUnknown * WINAPI CreateInstance(LPUNKNOWN lpunk, HRESULT *phr); + STDMETHODIMP QueryInterface(REFIID riid, void **ppv); IFilterGraph *GetGraph() {return m_pGraph;} private: @@ -89,25 +90,25 @@ class CBallStream : public CSourceStream, public IAMStreamConfig, public IKsProp private: - int m_iImageHeight; // The current image height - int m_iImageWidth; // And current image width - int m_iRepeatTime; // Time in msec between frames - const int m_iDefaultRepeatTime; // Initial m_iRepeatTime + //int m_iImageHeight; // The current image height + //int m_iImageWidth; // And current image width + //int m_iRepeatTime; // Time in msec between frames + //const int m_iDefaultRepeatTime; // Initial m_iRepeatTime - BYTE m_BallPixel[4]; // Represents one coloured ball - int m_iPixelSize; // The pixel size in bytes - PALETTEENTRY m_Palette[256]; // The optimal palette for the image + //BYTE m_BallPixel[4]; // Represents one coloured ball + //int m_iPixelSize; // The pixel size in bytes + //PALETTEENTRY m_Palette[256]; // The optimal palette for the image CCritSec m_cSharedState; // Lock on m_rtSampleTime and m_Ball CRefTime m_rtSampleTime; // The time stamp for each sample - CBall *m_Ball; // The current ball object + //CBall *m_Ball; // The current ball object CBouncingBall *m_pParent; REFERENCE_TIME m_rtLastTime; // set up the palette appropriately - enum Colour {Red, Blue, Green, Yellow}; - HRESULT SetPaletteEntries(Colour colour); + //enum Colour {Red, Blue, Green, Yellow}; + //HRESULT SetPaletteEntries(Colour colour); }; // CBallStream From 54586aa914b678acde5eaeaa3cae50452571bb56 Mon Sep 17 00:00:00 2001 From: TimSC Date: Fri, 22 Nov 2013 11:11:19 +0000 Subject: [PATCH 127/256] Skype connects --- winsource/fball.cpp | 93 +++------------------------------------------ 1 file changed, 6 insertions(+), 87 deletions(-) diff --git a/winsource/fball.cpp b/winsource/fball.cpp index 7114ae7..a607fcb 100644 --- a/winsource/fball.cpp +++ b/winsource/fball.cpp @@ -166,13 +166,7 @@ BOOL APIENTRY DllMain(HANDLE hModule, CUnknown * WINAPI CBouncingBall::CreateInstance(LPUNKNOWN lpunk, HRESULT *phr) { ASSERT(phr); - CUnknown *punk = new CBouncingBall(lpunk, phr); - if(punk == NULL) - { - if(phr) - *phr = E_OUTOFMEMORY; - } return punk; } // CreateInstance @@ -194,26 +188,12 @@ HRESULT CBouncingBall::QueryInterface(REFIID riid, void **ppv) CBouncingBall::CBouncingBall(LPUNKNOWN lpunk, HRESULT *phr) : CSource(NAME("Bouncing ball"), lpunk, CLSID_BouncingBall) { - ASSERT(phr); - CAutoLock cAutoLock(&m_cStateLock); - - /*m_paStreams = (CSourceStream **) new CBallStream*[1]; - if(m_paStreams == NULL) - { - if(phr) - *phr = E_OUTOFMEMORY; - return; - } - - m_paStreams[0] = new CBallStream(phr, this, L"A Bouncing Ball!"); - if(m_paStreams[0] == NULL) - { - if(phr) - *phr = E_OUTOFMEMORY; - - return; - }*/ + ASSERT(phr); + CAutoLock cAutoLock(&m_cStateLock); + // Create the one and only output pin + m_paStreams = (CSourceStream **) new CBallStream*[1]; + m_paStreams[0] = new CBallStream(phr, this, L"Bouncing Ball"); } // (Constructor) @@ -233,12 +213,7 @@ CBallStream::CBallStream(HRESULT *phr, // Set the default media type as 320x240x24@15 GetMediaType(4, &m_mt); - /*m_Ball = new CBall(m_iImageWidth, m_iImageHeight); - if(m_Ball == NULL) - { - if(phr) - *phr = E_OUTOFMEMORY; - }*/ + } // (Constructor) @@ -248,9 +223,6 @@ CBallStream::CBallStream(HRESULT *phr, // CBallStream::~CBallStream() { -/* CAutoLock cAutoLock(&m_cSharedState); - if(m_Ball) - delete m_Ball;*/ } // (Destructor) @@ -326,59 +298,6 @@ HRESULT 
CBallStream::SetMediaType(const CMediaType *pMediaType) return hr; } // SetMediaType -// -// SetPaletteEntries -// -// If we set our palette to the current system palette + the colours we want -// the system has the least amount of work to do whilst plotting our images, -// if this stream is rendered to the current display. The first non reserved -// palette slot is at m_Palette[10], so put our first colour there. Also -// guarantees that black is always represented by zero in the frame buffer -// -/*HRESULT CBallStream::SetPaletteEntries(Colour color) -{ - CAutoLock cAutoLock(m_pFilter->pStateLock()); - - HDC hdc = GetDC(NULL); // hdc for the current display. - UINT res = GetSystemPaletteEntries(hdc, 0, iPALETTE_COLORS, (LPPALETTEENTRY) &m_Palette); - ReleaseDC(NULL, hdc); - - if(res == 0) - return E_FAIL; - - switch(color) - { - case Red: - m_Palette[10].peBlue = 0; - m_Palette[10].peGreen = 0; - m_Palette[10].peRed = 0xff; - break; - - case Yellow: - m_Palette[10].peBlue = 0; - m_Palette[10].peGreen = 0xff; - m_Palette[10].peRed = 0xff; - break; - - case Blue: - m_Palette[10].peBlue = 0xff; - m_Palette[10].peGreen = 0; - m_Palette[10].peRed = 0; - break; - - case Green: - m_Palette[10].peBlue = 0; - m_Palette[10].peGreen = 0xff; - m_Palette[10].peRed = 0; - break; - } - - m_Palette[10].peFlags = 0; - return NOERROR; - -} // SetPaletteEntries -*/ - ////////////////////////////////////////////////////////////////////////// // This is called when the output format has been negotiated ////////////////////////////////////////////////////////////////////////// From e36e5e29dd61be754bc607636ff24450a773a57d Mon Sep 17 00:00:00 2001 From: TimSC Date: Fri, 22 Nov 2013 11:17:39 +0000 Subject: [PATCH 128/256] update headers and guids --- winsource/ball.cpp | 58 --------------------------------------------- winsource/ball.h | 15 +++--------- winsource/fball.cpp | 18 -------------- winsource/fball.h | 9 +------ 4 files changed, 4 insertions(+), 96 deletions(-) diff --git a/winsource/ball.cpp b/winsource/ball.cpp index d46e723..39654b1 100644 --- a/winsource/ball.cpp +++ b/winsource/ball.cpp @@ -1,61 +1,3 @@ -//------------------------------------------------------------------------------ -// File: Ball.cpp -// -// Desc: DirectShow sample code. This sample illustrates a simple source -// filter that produces decompressed images showing a ball bouncing -// around. Each movement of the ball is done by generating a new image. -// We use the CSource and CSourceStream base classes to manage a source -// filter - we are a live source and so do not support any seeking. -// -// The image stream is never-ending, with the ball color dependent on -// bit depth of the current display device. 32, 24, 16 (555 and 565), -// and 8 bit palettized types can be supplied. -// -// In implementation, the CSource and CSourceStream base classes from -// the SDK are used to implement some of the more tedious effort -// associated with source filters. In particular, the starting and -// stopping of worker threads based upon overall activation/stopping -// is facilitated. A worker thread sits in a loop asking for buffers -// and then calls the PURE virtual FillBuffer method when it has a -// buffer available to fill. -// -// The sample also has a simple quality management implementation in -// the filter. With the exception of renderers (which normally initiate -// it), this is controlled through IQualityControl. In each frame it -// is called for status. 
Due to the straightforward nature of the -// filter, spacing of samples sent downward can be controlled so that -// any CPU used runs flat out. -// -// Demonstration instructions: -// -// Start GraphEdit, which is available in the SDK DXUtils folder. Click -// on the Graph menu and select "Insert Filters." From the dialog box, -// double click on "DirectShow filters," then "Bouncing ball" and then -// dismiss the dialog. Go to the output pin of the filter box and -// right click, selecting "Render." A video renderer will be inserted -// and connected up (on some displays there may be a color space -// convertor put between them to get the pictures into a suitable -// format). Then click "run" on GraphEdit and see the ball bounce -// around the window... -// -// Files: -// -// ball.cpp Looks after drawing a moving bouncing ball -// ball.h Class definition for the ball drawing object -// ball.rc Version and title information resources -// fball.cpp The real filter class implementation -// fball.h Class definition for the main filter object -// resource.h A couple of identifiers for our resources -// -// Base classes used: -// -// CSource Base class for a generic source filter -// CSourceStream A base class for a source filters stream -// -// -// Copyright (c) Microsoft Corporation. All rights reserved. -//------------------------------------------------------------------------------ - #include #include "ball.h" diff --git a/winsource/ball.h b/winsource/ball.h index 8285788..a501d71 100644 --- a/winsource/ball.h +++ b/winsource/ball.h @@ -1,19 +1,10 @@ -//------------------------------------------------------------------------------ -// File: Ball.h -// -// Desc: DirectShow sample code - header file for the bouncing ball -// source filter. For more information, refer to Ball.cpp. -// -// Copyright (c) Microsoft Corporation. All rights reserved. -//------------------------------------------------------------------------------ //------------------------------------------------------------------------------ // Define GUIDS used in this sample //------------------------------------------------------------------------------ -// { fd501041-8ebe-11ce-8183-00aa00577da1 } -DEFINE_GUID(CLSID_BouncingBall, -0xfd501041, 0x8ebe, 0x11ce, 0x81, 0x83, 0x00, 0xaa, 0x00, 0x57, 0x7d, 0xa1); - +// {3A24BD2F-B9B1-4B32-9A1E-17791624B6AB} +DEFINE_GUID(CLSID_BouncingBall, +0x3a24bd2f, 0xb9b1, 0x4b32, 0x9a, 0x1e, 0x17, 0x79, 0x16, 0x24, 0xb6, 0xab); //------------------------------------------------------------------------------ // Class CBall diff --git a/winsource/fball.cpp b/winsource/fball.cpp index a607fcb..aeb85fb 100644 --- a/winsource/fball.cpp +++ b/winsource/fball.cpp @@ -1,12 +1,3 @@ -//------------------------------------------------------------------------------ -// File: FBall.cpp -// -// Desc: DirectShow sample code - implementation of filter behaviors -// for the bouncing ball source filter. For more information, -// refer to Ball.cpp. -// -// Copyright (c) Microsoft Corporation. All rights reserved. 
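[PATCH 128/256] above gives the filter its own CLSID, {3A24BD2F-B9B1-4B32-9A1E-17791624B6AB}, so the virtual camera no longer collides with a stock copy of the Microsoft Bouncing Ball sample that may already be registered on the machine. The GUID still needs storage in exactly one translation unit; the usual DEFINE_GUID convention (shown as a sketch, not a new change to the patch) is:

// Exactly one .cpp includes <initguid.h> before the header, so that
// DEFINE_GUID(CLSID_BouncingBall, 0x3a24bd2f, ...) emits the GUID definition;
// every other file that includes the header only sees an extern declaration.
#include <initguid.h>
#include "ball.h"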
-//------------------------------------------------------------------------------ #include #include @@ -197,7 +188,6 @@ CBouncingBall::CBouncingBall(LPUNKNOWN lpunk, HRESULT *phr) : } // (Constructor) - // // Constructor // @@ -207,17 +197,9 @@ CBallStream::CBallStream(HRESULT *phr, CSourceStream(NAME("Bouncing Ball"),phr, pParent, pPinName), m_pParent(pParent) { -// ASSERT(phr); - // CAutoLock cAutoLock(&m_cSharedState); - - // Set the default media type as 320x240x24@15 GetMediaType(4, &m_mt); - - - } // (Constructor) - // // Destructor // diff --git a/winsource/fball.h b/winsource/fball.h index d6e9bae..df3bccb 100644 --- a/winsource/fball.h +++ b/winsource/fball.h @@ -1,11 +1,4 @@ -//------------------------------------------------------------------------------ -// File: FBall.h -// -// Desc: DirectShow sample code - main header file for the bouncing ball -// source filter. For more information refer to Ball.cpp -// -// Copyright (c) Microsoft Corporation. All rights reserved. -//------------------------------------------------------------------------------ + #define DECLARE_PTR(type, ptr, expr) type* ptr = (type*)(expr); From ff9c9adb4782414e89a259471d8e0d0597de1c1a Mon Sep 17 00:00:00 2001 From: TimSC Date: Fri, 22 Nov 2013 11:20:11 +0000 Subject: [PATCH 129/256] Remove unnecessary files --- winsource/Ball.vcxproj | 2 - winsource/ball.cpp | 142 ----------------------------------------- winsource/ball.h | 61 ------------------ winsource/fball.cpp | 1 - 4 files changed, 206 deletions(-) delete mode 100644 winsource/ball.cpp delete mode 100644 winsource/ball.h diff --git a/winsource/Ball.vcxproj b/winsource/Ball.vcxproj index 42de6a9..5119478 100644 --- a/winsource/Ball.vcxproj +++ b/winsource/Ball.vcxproj @@ -190,14 +190,12 @@ - - diff --git a/winsource/ball.cpp b/winsource/ball.cpp deleted file mode 100644 index 39654b1..0000000 --- a/winsource/ball.cpp +++ /dev/null @@ -1,142 +0,0 @@ - -#include -#include "ball.h" - - -//------------------------------------------------------------------------------ -// Name: CBall::CBall(() -// Desc: Constructor for the ball class. The default arguments provide a -// reasonable image and ball size. -//------------------------------------------------------------------------------ -CBall::CBall(int iImageWidth, int iImageHeight, int iBallSize) : - m_iImageWidth(iImageWidth), - m_iImageHeight(iImageHeight), - m_iBallSize(iBallSize), - m_iAvailableWidth(iImageWidth - iBallSize), - m_iAvailableHeight(iImageHeight - iBallSize), - m_x(0), - m_y(0), - m_xDir(RIGHT), - m_yDir(UP) -{ - // Check we have some (arbitrary) space to bounce in. - ASSERT(iImageWidth > 2*iBallSize); - ASSERT(iImageHeight > 2*iBallSize); - - // Random position for showing off a video mixer - m_iRandX = rand(); - m_iRandY = rand(); - -} // (Constructor) - - -//------------------------------------------------------------------------------ -// Name: CBall::PlotBall() -// Desc: Positions the ball on the memory buffer. -// Assumes the image buffer is arranged as Row 1,Row 2,...,Row n -// in memory and that the data is contiguous. 
-//------------------------------------------------------------------------------ -void CBall::PlotBall(BYTE pFrame[], BYTE BallPixel[], int iPixelSize) -{ - ASSERT(m_x >= 0); - ASSERT(m_x <= m_iAvailableWidth); - ASSERT(m_y >= 0); - ASSERT(m_y <= m_iAvailableHeight); - ASSERT(pFrame != NULL); - ASSERT(BallPixel != NULL); - - // The current byte of interest in the frame - BYTE *pBack; - pBack = pFrame; - - // Plot the ball into the correct location - BYTE *pBall = pFrame + ( m_y * m_iImageWidth * iPixelSize) + m_x * iPixelSize; - - for(int row = 0; row < m_iBallSize; row++) - { - for(int col = 0; col < m_iBallSize; col++) - { - // For each byte fill its value from BallPixel[] - for(int i = 0; i < iPixelSize; i++) - { - if(WithinCircle(col, row)) - { - *pBall = BallPixel[i]; - } - pBall++; - } - } - pBall += m_iAvailableWidth * iPixelSize; - } - -} // PlotBall - - -//------------------------------------------------------------------------------ -// CBall::BallPosition() -// -// Returns the 1-dimensional position of the ball at time t millisecs -// (note that millisecs runs out after about a month!) -//------------------------------------------------------------------------------ -int CBall::BallPosition(int iPixelTime, // Millisecs per pixel - int iLength, // Distance between the bounce points - int time, // Time in millisecs - int iOffset) // For a bit of randomness -{ - // Calculate the position of an unconstrained ball (no walls) - // then fold it back and forth to calculate the actual position - - int x = time / iPixelTime; - x += iOffset; - x %= 2 * iLength; - - // check it is still in bounds - if(x > iLength) - { - x = 2*iLength - x; - } - return x; - -} // BallPosition - - -//------------------------------------------------------------------------------ -// CBall::MoveBall() -// -// Set (m_x, m_y) to the new position of the ball. move diagonally -// with speed m_v in each of x and y directions. -// Guarantees to keep the ball in valid areas of the frame. -// When it hits an edge the ball bounces in the traditional manner!. 
-// The boundaries are (0..m_iAvailableWidth, 0..m_iAvailableHeight) -// -//------------------------------------------------------------------------------ -void CBall::MoveBall(CRefTime rt) -{ - m_x = BallPosition(10, m_iAvailableWidth, rt.Millisecs(), m_iRandX); - m_y = BallPosition(10, m_iAvailableHeight, rt.Millisecs(), m_iRandY); - -} // MoveBall - - -//------------------------------------------------------------------------------ -// CBall:WithinCircle() -// -// Return TRUE if (x,y) is within a circle radius S/2, center (S/2, S/2) -// where S is m_iBallSize else return FALSE -//------------------------------------------------------------------------------ -inline BOOL CBall::WithinCircle(int x, int y) -{ - unsigned int r = m_iBallSize / 2; - - if((x-r)*(x-r) + (y-r)*(y-r) < r*r) - { - return TRUE; - } - else - { - return FALSE; - } - -} // WithinCircle - - diff --git a/winsource/ball.h b/winsource/ball.h deleted file mode 100644 index a501d71..0000000 --- a/winsource/ball.h +++ /dev/null @@ -1,61 +0,0 @@ - -//------------------------------------------------------------------------------ -// Define GUIDS used in this sample -//------------------------------------------------------------------------------ -// {3A24BD2F-B9B1-4B32-9A1E-17791624B6AB} -DEFINE_GUID(CLSID_BouncingBall, -0x3a24bd2f, 0xb9b1, 0x4b32, 0x9a, 0x1e, 0x17, 0x79, 0x16, 0x24, 0xb6, 0xab); - -//------------------------------------------------------------------------------ -// Class CBall -// -// This class encapsulates the behavior of the bounching ball over time -//------------------------------------------------------------------------------ -class CBall -{ -public: - - CBall(int iImageWidth = 320, int iImageHeight = 240, int iBallSize = 10); - - // Plots the square ball in the image buffer, at the current location. - // Use BallPixel[] as pixel value for the ball. - // Plots zero in all 'background' image locations. 
- // iPixelSize - the number of bytes in a pixel (size of BallPixel[]) - void PlotBall(BYTE pFrame[], BYTE BallPixel[], int iPixelSize); - - // Moves the ball 1 pixel in each of the x and y directions - void MoveBall(CRefTime rt); - - int GetImageWidth() { return m_iImageWidth ;} - int GetImageHeight() { return m_iImageHeight ;} - -private: - - enum xdir { LEFT = -1, RIGHT = 1 }; - enum ydir { UP = 1, DOWN = -1 }; - - // The dimensions we can plot in, allowing for the width of the ball - int m_iAvailableHeight, m_iAvailableWidth; - - int m_iImageHeight; // The image height - int m_iImageWidth; // The image width - int m_iBallSize; // The diameter of the ball - int m_iRandX, m_iRandY; // For a bit of randomness - xdir m_xDir; // Direction the ball - ydir m_yDir; // Likewise vertically - - // The X position, in pixels, of the ball in the frame - // (0 < x < m_iAvailableWidth) - int m_x; - - // The Y position, in pixels, of the ball in the frame - // (0 < y < m_iAvailableHeight) - int m_y; - - // Return the one-dimensional position of the ball at time T milliseconds - int BallPosition(int iPixelTime, int iLength, int time, int iOffset); - - /// Tests a given pixel to see if it should be plotted - BOOL WithinCircle(int x, int y); - -}; // CBall diff --git a/winsource/fball.cpp b/winsource/fball.cpp index aeb85fb..9c6534e 100644 --- a/winsource/fball.cpp +++ b/winsource/fball.cpp @@ -2,7 +2,6 @@ #include #include #include -#include "ball.h" #include "fball.h" #pragma warning(disable:4710) // 'function': function not inlined (optimzation) From cbc0c9bcb2762cb253763fbf3e87d4059403190d Mon Sep 17 00:00:00 2001 From: TimSC Date: Fri, 22 Nov 2013 11:23:13 +0000 Subject: [PATCH 130/256] Reinstate guid --- winsource/fball.h | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/winsource/fball.h b/winsource/fball.h index df3bccb..4fdb841 100644 --- a/winsource/fball.h +++ b/winsource/fball.h @@ -1,4 +1,6 @@ - +// {3A24BD2F-B9B1-4B32-9A1E-17791624B6AB} +DEFINE_GUID(CLSID_BouncingBall, +0x3a24bd2f, 0xb9b1, 0x4b32, 0x9a, 0x1e, 0x17, 0x79, 0x16, 0x24, 0xb6, 0xab); #define DECLARE_PTR(type, ptr, expr) type* ptr = (type*)(expr); From 96d08bdc555cbedbe15337e8c97494f2777794ce Mon Sep 17 00:00:00 2001 From: TimSC Date: Fri, 22 Nov 2013 11:38:30 +0000 Subject: [PATCH 131/256] Load test pattern into buffer --- winsource/fball.cpp | 21 +++++++++++++++++---- 1 file changed, 17 insertions(+), 4 deletions(-) diff --git a/winsource/fball.cpp b/winsource/fball.cpp index 9c6534e..bd93b88 100644 --- a/winsource/fball.cpp +++ b/winsource/fball.cpp @@ -232,8 +232,12 @@ HRESULT CBallStream::FillBuffer(IMediaSample *pms) { REFERENCE_TIME rtNow; + VIDEOINFOHEADER *pvi = (VIDEOINFOHEADER *)(m_mt.Format()); REFERENCE_TIME avgFrameTime = ((VIDEOINFOHEADER*)m_mt.pbFormat)->AvgTimePerFrame; + LONG width = pvi->bmiHeader.biWidth; + LONG height = pvi->bmiHeader.biHeight; + rtNow = m_rtLastTime; m_rtLastTime += avgFrameTime; pms->SetTime(&rtNow, &m_rtLastTime); @@ -243,10 +247,19 @@ HRESULT CBallStream::FillBuffer(IMediaSample *pms) long lDataLen; pms->GetPointer(&pData); lDataLen = pms->GetSize(); - for(int i = 0; i < lDataLen; ++i) - { - pData[i] = rand(); - } + + unsigned cursor = 0; + for(LONG y=0; y < height; y++) + for(LONG x=0; x < width; x++) + { + if(cursor > lDataLen) continue; + + pData[cursor] = x % 255; //Blue + pData[cursor+1] = y % 255; //Green + pData[cursor+2] = rand(); //Red + + cursor += 3; + } return NOERROR; From 6f0910e419d103ca058998df1a45e5348333bdab Mon Sep 17 00:00:00 2001 From: TimSC Date: 
Fri, 22 Nov 2013 15:50:26 +0000 Subject: [PATCH 132/256] Dshow source connects to named pipe --- winsource/fball.cpp | 53 +++++++++++++++++++++++++++++++++++++++++++++ winsource/fball.h | 6 +++++ 2 files changed, 59 insertions(+) diff --git a/winsource/fball.cpp b/winsource/fball.cpp index bd93b88..a296eeb 100644 --- a/winsource/fball.cpp +++ b/winsource/fball.cpp @@ -197,6 +197,11 @@ CBallStream::CBallStream(HRESULT *phr, m_pParent(pParent) { GetMediaType(4, &m_mt); + + memset(&this->rxo, 0x00, sizeof(OVERLAPPED)); + memset(&this->txo, 0x00, sizeof(OVERLAPPED)); + this->pipeHandle = 0; + } // (Constructor) // @@ -515,3 +520,51 @@ HRESULT CBallStream::QuerySupported(REFGUID guidPropSet, DWORD dwPropID, DWORD * if (pTypeSupport) *pTypeSupport = KSPROPERTY_SUPPORT_GET; return S_OK; } + +DWORD CBallStream::ThreadProc() +{ + + if(this->pipeHandle == 0) + { + LPCTSTR n = L"\\\\.\\pipe\\testpipe"; + + this->pipeHandle = CreateFile(n, + GENERIC_READ | GENERIC_WRITE, + FILE_SHARE_READ | FILE_SHARE_WRITE, + NULL, + OPEN_EXISTING, + FILE_ATTRIBUTE_NORMAL | FILE_FLAG_OVERLAPPED, + NULL); + } + + if(this->pipeHandle != 0) + { + //Transmit test message using named pipe + DWORD bytesWritten = 0; + char test[] = "Test Message"; + + if(HasOverlappedIoCompleted(&this->txo)) + { + BOOL res = WriteFileEx(this->pipeHandle, test, strlen(test), &this->txo, NULL); + } + + BOOL res = GetOverlappedResult(this->pipeHandle, &txo, &bytesWritten, TRUE); + + //Receive messages from named pipe + char buff[1000]; + DWORD bytesRead = 0; + + if(HasOverlappedIoCompleted(&this->rxo)) + { + res = ReadFileEx(this->pipeHandle, + buff, + 1000, + &rxo, + NULL); + } + + res = GetOverlappedResult(this->pipeHandle, &this->rxo, &bytesRead, FALSE); + } + + return CSourceStream::ThreadProc(); +} \ No newline at end of file diff --git a/winsource/fball.h b/winsource/fball.h index 4fdb841..5a873ad 100644 --- a/winsource/fball.h +++ b/winsource/fball.h @@ -70,6 +70,8 @@ class CBallStream : public CSourceStream, public IAMStreamConfig, public IKsProp // Quality control notifications sent to us STDMETHODIMP Notify(IBaseFilter * pSender, Quality q); + DWORD ThreadProc(); + STDMETHODIMP QueryInterface(REFIID riid, void **ppv); STDMETHODIMP_(ULONG) AddRef() { return GetOwner()->AddRef(); } \ STDMETHODIMP_(ULONG) Release() { return GetOwner()->Release(); } @@ -101,6 +103,10 @@ class CBallStream : public CSourceStream, public IAMStreamConfig, public IKsProp REFERENCE_TIME m_rtLastTime; + HANDLE pipeHandle; + OVERLAPPED rxo; + OVERLAPPED txo; + // set up the palette appropriately //enum Colour {Red, Blue, Green, Yellow}; //HRESULT SetPaletteEntries(Colour colour); From f9198ca00fb1579fdb39a71191be7cef4d1b2591 Mon Sep 17 00:00:00 2001 From: TimSC Date: Fri, 22 Nov 2013 16:09:47 +0000 Subject: [PATCH 133/256] Store current frame in memory --- winsource/fball.cpp | 31 ++++++++++++++++++++++++------- winsource/fball.h | 3 +++ 2 files changed, 27 insertions(+), 7 deletions(-) diff --git a/winsource/fball.cpp b/winsource/fball.cpp index a296eeb..b88f69b 100644 --- a/winsource/fball.cpp +++ b/winsource/fball.cpp @@ -202,6 +202,9 @@ CBallStream::CBallStream(HRESULT *phr, memset(&this->txo, 0x00, sizeof(OVERLAPPED)); this->pipeHandle = 0; + this->currentFrame = NULL; + this->currentFrameLen = 0; + } // (Constructor) // @@ -209,10 +212,16 @@ CBallStream::CBallStream(HRESULT *phr, // CBallStream::~CBallStream() { + if(this->pipeHandle != 0) + CloseHandle(this->pipeHandle); + if(this->currentFrame != NULL) + delete [] this->currentFrame; + this->currentFrame 
= NULL; + this->currentFrameLen = 0; + this->pipeHandle = 0; } // (Destructor) - HRESULT CBallStream::QueryInterface(REFIID riid, void **ppv) { // Standard OLE stuff @@ -253,18 +262,26 @@ HRESULT CBallStream::FillBuffer(IMediaSample *pms) pms->GetPointer(&pData); lDataLen = pms->GetSize(); - unsigned cursor = 0; - for(LONG y=0; y < height; y++) + if(this->currentFrameLen != lDataLen || this->currentFrame == NULL) + { + this->currentFrame = new BYTE[lDataLen]; + this->currentFrameLen = lDataLen; + + long cursor = 0; + for(LONG y=0; y < height; y++) for(LONG x=0; x < width; x++) { - if(cursor > lDataLen) continue; + if(cursor > this->currentFrameLen) continue; - pData[cursor] = x % 255; //Blue - pData[cursor+1] = y % 255; //Green - pData[cursor+2] = rand(); //Red + this->currentFrame[cursor] = x % 255; //Blue + this->currentFrame[cursor+1] = y % 255; //Green + this->currentFrame[cursor+2] = rand(); //Red cursor += 3; } + } + + memcpy(pData, this->currentFrame, lDataLen); return NOERROR; diff --git a/winsource/fball.h b/winsource/fball.h index 5a873ad..da8fbbd 100644 --- a/winsource/fball.h +++ b/winsource/fball.h @@ -107,6 +107,9 @@ class CBallStream : public CSourceStream, public IAMStreamConfig, public IKsProp OVERLAPPED rxo; OVERLAPPED txo; + BYTE *currentFrame; + LONG currentFrameLen; + // set up the palette appropriately //enum Colour {Red, Blue, Green, Yellow}; //HRESULT SetPaletteEntries(Colour colour); From ce2401d7b02d33fb29696dd9c2f07127558ca390 Mon Sep 17 00:00:00 2001 From: TimSC Date: Fri, 22 Nov 2013 18:26:48 +0000 Subject: [PATCH 134/256] Suspect that there are multiple instances of the class --- winsource/fball.cpp | 167 ++++++++++++++++++++++++++++++++------------ winsource/fball.h | 2 + 2 files changed, 123 insertions(+), 46 deletions(-) diff --git a/winsource/fball.cpp b/winsource/fball.cpp index b88f69b..cdf478c 100644 --- a/winsource/fball.cpp +++ b/winsource/fball.cpp @@ -200,10 +200,11 @@ CBallStream::CBallStream(HRESULT *phr, memset(&this->rxo, 0x00, sizeof(OVERLAPPED)); memset(&this->txo, 0x00, sizeof(OVERLAPPED)); - this->pipeHandle = 0; + this->pipeHandle = INVALID_HANDLE_VALUE; this->currentFrame = NULL; this->currentFrameLen = 0; + this->testCursor = 0; } // (Constructor) @@ -236,6 +237,108 @@ HRESULT CBallStream::QueryInterface(REFIID riid, void **ppv) return S_OK; } +void CBallStream::UpdateNamedPipe() +{ + /*if(this->currentFrame!=NULL) + { + delete [] this->currentFrame; + this->currentFrame = NULL; + this->currentFrameLen = 0; + } + + this->currentFrame = NULL;*/ + + if(this->pipeHandle == INVALID_HANDLE_VALUE) + { + LPCTSTR n = L"\\\\.\\pipe\\testpipe"; + + this->pipeHandle = CreateFile(n, + GENERIC_READ | GENERIC_WRITE, + FILE_SHARE_READ | FILE_SHARE_WRITE, + NULL, + OPEN_EXISTING, + FILE_ATTRIBUTE_NORMAL | FILE_FLAG_OVERLAPPED, + NULL); + } + + if(this->pipeHandle == INVALID_HANDLE_VALUE) + { + for(DWORD i=0; icurrentFrameLen; i++) + { + if(i%3==1) + this->currentFrame[i] = 0x255; + else + this->currentFrame[i] = 0x0; + } + } + else + { + for(DWORD i=0; icurrentFrameLen; i++) + { + if(i%3==0) + this->currentFrame[i] = 0x255; + else + this->currentFrame[i] = 0x0; + } + } + + if(this->pipeHandle != INVALID_HANDLE_VALUE) + { + for(DWORD i=0; icurrentFrameLen; i++) + { + this->currentFrame[i] = 0x255; + } + + + //Transmit test message using named pipe + DWORD bytesWritten = 0; + char test[] = "Test Message"; + + if(HasOverlappedIoCompleted(&this->txo)) + { + BOOL res = WriteFileEx(this->pipeHandle, test, strlen(test), &this->txo, NULL); + } + + BOOL res = 
GetOverlappedResult(this->pipeHandle, &txo, &bytesWritten, TRUE); + + //Receive messages from named pipe + char buff[1000]; + DWORD bytesRead = 0; + + if(HasOverlappedIoCompleted(&this->rxo)) + { + res = ReadFileEx(this->pipeHandle, + buff, + 1000, + &rxo, + NULL); + } + + res = GetOverlappedResult(this->pipeHandle, &this->rxo, &bytesRead, FALSE); + + if(res) + { + for(DWORD i=0; icurrentFrame[i] = 0x255; + testCursor += 1; + if(testCursor >= this->currentFrameLen) + this->testCursor = 0; + } + } + + } + + /*if(this->currentFrame != NULL) + { + for(DWORD i=0; icurrentFrame[i] = rand(); + } + }*/ + +} + // // FillBuffer @@ -262,7 +365,16 @@ HRESULT CBallStream::FillBuffer(IMediaSample *pms) pms->GetPointer(&pData); lDataLen = pms->GetSize(); - if(this->currentFrameLen != lDataLen || this->currentFrame == NULL) + /*if(this->currentFrame != NULL) + { + delete [] this->currentFrame; + this->currentFrame = NULL; + this->currentFrameLen = 0; + }*/ + + this->UpdateNamedPipe(); + + if(this->currentFrame == NULL) { this->currentFrame = new BYTE[lDataLen]; this->currentFrameLen = lDataLen; @@ -281,7 +393,13 @@ HRESULT CBallStream::FillBuffer(IMediaSample *pms) } } - memcpy(pData, this->currentFrame, lDataLen); + if(this->currentFrame != NULL) + memcpy(pData, this->currentFrame, lDataLen); + + /*for(LONG i=0;ipipeHandle == 0) - { - LPCTSTR n = L"\\\\.\\pipe\\testpipe"; - - this->pipeHandle = CreateFile(n, - GENERIC_READ | GENERIC_WRITE, - FILE_SHARE_READ | FILE_SHARE_WRITE, - NULL, - OPEN_EXISTING, - FILE_ATTRIBUTE_NORMAL | FILE_FLAG_OVERLAPPED, - NULL); - } - - if(this->pipeHandle != 0) - { - //Transmit test message using named pipe - DWORD bytesWritten = 0; - char test[] = "Test Message"; - - if(HasOverlappedIoCompleted(&this->txo)) - { - BOOL res = WriteFileEx(this->pipeHandle, test, strlen(test), &this->txo, NULL); - } - - BOOL res = GetOverlappedResult(this->pipeHandle, &txo, &bytesWritten, TRUE); - - //Receive messages from named pipe - char buff[1000]; - DWORD bytesRead = 0; - - if(HasOverlappedIoCompleted(&this->rxo)) - { - res = ReadFileEx(this->pipeHandle, - buff, - 1000, - &rxo, - NULL); - } - - res = GetOverlappedResult(this->pipeHandle, &this->rxo, &bytesRead, FALSE); - } - return CSourceStream::ThreadProc(); } \ No newline at end of file diff --git a/winsource/fball.h b/winsource/fball.h index da8fbbd..aaad0ea 100644 --- a/winsource/fball.h +++ b/winsource/fball.h @@ -71,6 +71,7 @@ class CBallStream : public CSourceStream, public IAMStreamConfig, public IKsProp STDMETHODIMP Notify(IBaseFilter * pSender, Quality q); DWORD ThreadProc(); + void UpdateNamedPipe(); STDMETHODIMP QueryInterface(REFIID riid, void **ppv); STDMETHODIMP_(ULONG) AddRef() { return GetOwner()->AddRef(); } \ @@ -109,6 +110,7 @@ class CBallStream : public CSourceStream, public IAMStreamConfig, public IKsProp BYTE *currentFrame; LONG currentFrameLen; + int testCursor; // set up the palette appropriately //enum Colour {Red, Blue, Green, Yellow}; From 6f4602064a16f1ea2f851fd403edb32094103d48 Mon Sep 17 00:00:00 2001 From: TimSC Date: Fri, 22 Nov 2013 18:36:42 +0000 Subject: [PATCH 135/256] Simple frame update triggered by named pipe --- winsource/fball.cpp | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/winsource/fball.cpp b/winsource/fball.cpp index cdf478c..5674500 100644 --- a/winsource/fball.cpp +++ b/winsource/fball.cpp @@ -261,7 +261,7 @@ void CBallStream::UpdateNamedPipe() NULL); } - if(this->pipeHandle == INVALID_HANDLE_VALUE) + /*if(this->pipeHandle == INVALID_HANDLE_VALUE) { 
for(DWORD i=0; icurrentFrameLen; i++) { @@ -280,14 +280,14 @@ void CBallStream::UpdateNamedPipe() else this->currentFrame[i] = 0x0; } - } + }*/ if(this->pipeHandle != INVALID_HANDLE_VALUE) { - for(DWORD i=0; icurrentFrameLen; i++) + /*for(DWORD i=0; icurrentFrameLen; i++) { this->currentFrame[i] = 0x255; - } + }*/ //Transmit test message using named pipe @@ -320,7 +320,7 @@ void CBallStream::UpdateNamedPipe() { for(DWORD i=0; icurrentFrame[i] = 0x255; + this->currentFrame[testCursor] = 0x255; testCursor += 1; if(testCursor >= this->currentFrameLen) this->testCursor = 0; From 1575e73286ddcb6ca94501743062177cd3b7302e Mon Sep 17 00:00:00 2001 From: TimSC Date: Fri, 22 Nov 2013 21:16:46 +0000 Subject: [PATCH 136/256] Video finally responds to named pipe --- winsource/fball.cpp | 14 ++------------ 1 file changed, 2 insertions(+), 12 deletions(-) diff --git a/winsource/fball.cpp b/winsource/fball.cpp index 5674500..01928ce 100644 --- a/winsource/fball.cpp +++ b/winsource/fball.cpp @@ -261,7 +261,7 @@ void CBallStream::UpdateNamedPipe() NULL); } - /*if(this->pipeHandle == INVALID_HANDLE_VALUE) + if(this->pipeHandle == INVALID_HANDLE_VALUE) { for(DWORD i=0; icurrentFrameLen; i++) { @@ -271,16 +271,6 @@ void CBallStream::UpdateNamedPipe() this->currentFrame[i] = 0x0; } } - else - { - for(DWORD i=0; icurrentFrameLen; i++) - { - if(i%3==0) - this->currentFrame[i] = 0x255; - else - this->currentFrame[i] = 0x0; - } - }*/ if(this->pipeHandle != INVALID_HANDLE_VALUE) { @@ -320,7 +310,7 @@ void CBallStream::UpdateNamedPipe() { for(DWORD i=0; icurrentFrame[testCursor] = 0x255; + this->currentFrame[testCursor] = buff[i]; testCursor += 1; if(testCursor >= this->currentFrameLen) this->testCursor = 0; From f8d9e05021051f2f8391cc57a83bd633b0500e74 Mon Sep 17 00:00:00 2001 From: TimSC Date: Sun, 24 Nov 2013 15:20:35 +0000 Subject: [PATCH 137/256] Send meta data to server --- winsource/fball.cpp | 41 +++++++++++++++++++++++++++++++++++++++-- winsource/fball.h | 1 + 2 files changed, 40 insertions(+), 2 deletions(-) diff --git a/winsource/fball.cpp b/winsource/fball.cpp index 01928ce..db69bd9 100644 --- a/winsource/fball.cpp +++ b/winsource/fball.cpp @@ -281,7 +281,7 @@ void CBallStream::UpdateNamedPipe() //Transmit test message using named pipe - DWORD bytesWritten = 0; + /*DWORD bytesWritten = 0; char test[] = "Test Message"; if(HasOverlappedIoCompleted(&this->txo)) @@ -289,11 +289,12 @@ void CBallStream::UpdateNamedPipe() BOOL res = WriteFileEx(this->pipeHandle, test, strlen(test), &this->txo, NULL); } - BOOL res = GetOverlappedResult(this->pipeHandle, &txo, &bytesWritten, TRUE); + BOOL res = GetOverlappedResult(this->pipeHandle, &txo, &bytesWritten, TRUE);*/ //Receive messages from named pipe char buff[1000]; DWORD bytesRead = 0; + BOOL res = 0; if(HasOverlappedIoCompleted(&this->rxo)) { @@ -330,6 +331,41 @@ void CBallStream::UpdateNamedPipe() } +void CBallStream::SendStatusViaNamedPipe(UINT32 width, UINT32 height, UINT32 bufflen) +{ + if(this->pipeHandle != INVALID_HANDLE_VALUE) + { + /*for(DWORD i=0; icurrentFrameLen; i++) + { + this->currentFrame[i] = 0x255; + }*/ + + //Transmit test message using named pipe + DWORD bytesWritten = 0; + const int buffLen = 4*5; + char test[buffLen]; + UINT32 *pMsgType = (UINT32 *)&test[0]; + *pMsgType = 1; + UINT32 *pMsgLen = (UINT32 *)&test[4]; + *pMsgLen = 4*3; + UINT32 *pWidth = (UINT32 *)&test[8]; + *pWidth = width; + UINT32 *pHeight = (UINT32 *)&test[12]; + *pHeight = height; + UINT32 *pBuffLen = (UINT32 *)&test[16]; + *pBuffLen = bufflen; + + 
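	// Status message layout, as assembled above (native-endian UINT32 fields,
	// byte offsets into test[]):
	//   [0]  message type   = 1   (status/metadata)
	//   [4]  payload length = 12  (the three UINT32 fields after the 8-byte header)
	//   [8]  frame width in pixels
	//   [12] frame height in pixels
	//   [16] frame buffer length in bytes
	// A later patch in this series treats incoming pipe messages of type 2 as
	// raw frame data; presumably the server uses the values above to size
	// those frames, though the server side is not shown here.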
if(HasOverlappedIoCompleted(&this->txo)) + { + BOOL res = WriteFileEx(this->pipeHandle, test, buffLen, &this->txo, NULL); + } + + BOOL res = GetOverlappedResult(this->pipeHandle, &txo, &bytesWritten, TRUE); + } + +} + + // // FillBuffer // @@ -363,6 +399,7 @@ HRESULT CBallStream::FillBuffer(IMediaSample *pms) }*/ this->UpdateNamedPipe(); + this->SendStatusViaNamedPipe(width, height, lDataLen); if(this->currentFrame == NULL) { diff --git a/winsource/fball.h b/winsource/fball.h index aaad0ea..28293d8 100644 --- a/winsource/fball.h +++ b/winsource/fball.h @@ -72,6 +72,7 @@ class CBallStream : public CSourceStream, public IAMStreamConfig, public IKsProp DWORD ThreadProc(); void UpdateNamedPipe(); + void CBallStream::SendStatusViaNamedPipe(UINT32 width, UINT32 height, UINT32 bufflen); STDMETHODIMP QueryInterface(REFIID riid, void **ppv); STDMETHODIMP_(ULONG) AddRef() { return GetOwner()->AddRef(); } \ From e8e9ca6cbcae7570dc47f45987d90c75fcd2ad2a Mon Sep 17 00:00:00 2001 From: TimSC Date: Sun, 24 Nov 2013 15:22:57 +0000 Subject: [PATCH 138/256] Add members for decoding named pipe messages --- winsource/fball.cpp | 4 ++++ winsource/fball.h | 4 ++++ 2 files changed, 8 insertions(+) diff --git a/winsource/fball.cpp b/winsource/fball.cpp index db69bd9..a541a20 100644 --- a/winsource/fball.cpp +++ b/winsource/fball.cpp @@ -206,6 +206,10 @@ CBallStream::CBallStream(HRESULT *phr, this->currentFrameLen = 0; this->testCursor = 0; + this->rxBuff = NULL; + this->rxBuffLen = 0; + this->rxBuffAlloc = 0; + } // (Constructor) // diff --git a/winsource/fball.h b/winsource/fball.h index 28293d8..043fee6 100644 --- a/winsource/fball.h +++ b/winsource/fball.h @@ -103,6 +103,10 @@ class CBallStream : public CSourceStream, public IAMStreamConfig, public IKsProp //CBall *m_Ball; // The current ball object CBouncingBall *m_pParent; + char *rxBuff; + int rxBuffLen; + int rxBuffAlloc; + REFERENCE_TIME m_rtLastTime; HANDLE pipeHandle; From 05f41b361f09b83b7bf39986c4744beded2be806 Mon Sep 17 00:00:00 2001 From: TimSC Date: Sun, 24 Nov 2013 15:23:57 +0000 Subject: [PATCH 139/256] Clean dynamically allocated mem --- winsource/fball.cpp | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/winsource/fball.cpp b/winsource/fball.cpp index a541a20..709223b 100644 --- a/winsource/fball.cpp +++ b/winsource/fball.cpp @@ -225,6 +225,11 @@ CBallStream::~CBallStream() this->currentFrameLen = 0; this->pipeHandle = 0; + + if(this->rxBuff!=NULL) + delete [] this->rxBuff; + this->rxBuffLen = 0; + this->rxBuffAlloc = 0; } // (Destructor) HRESULT CBallStream::QueryInterface(REFIID riid, void **ppv) From 6de1d9b539863ce47d789956fd56022b626f116e Mon Sep 17 00:00:00 2001 From: TimSC Date: Sun, 24 Nov 2013 15:26:16 +0000 Subject: [PATCH 140/256] Copy rx to buffer --- winsource/fball.cpp | 43 ++++++++++++++++++++++++++++++++++++++++--- 1 file changed, 40 insertions(+), 3 deletions(-) diff --git a/winsource/fball.cpp b/winsource/fball.cpp index 709223b..b6d27fd 100644 --- a/winsource/fball.cpp +++ b/winsource/fball.cpp @@ -316,17 +316,54 @@ void CBallStream::UpdateNamedPipe() res = GetOverlappedResult(this->pipeHandle, &this->rxo, &bytesRead, FALSE); - if(res) + if(res && rxBuffLen > 0) { - for(DWORD i=0; icurrentFrame[testCursor] = buff[i]; testCursor += 1; if(testCursor >= this->currentFrameLen) this->testCursor = 0; - } + }*/ + + + rxBuffLen = 0; //Discard buffer } + + } /*if(this->currentFrame != NULL) From 4292a6d0b31bdca25dc8912ddf5c7acf9f994d18 Mon Sep 17 00:00:00 2001 From: TimSC Date: Sun, 24 Nov 2013 15:27:46 +0000 Subject: [PATCH 
141/256] Decode buffer code enabled but does not work --- winsource/fball.cpp | 42 +++++++++++++++++++++++++++++++++++------- 1 file changed, 35 insertions(+), 7 deletions(-) diff --git a/winsource/fball.cpp b/winsource/fball.cpp index b6d27fd..47c7591 100644 --- a/winsource/fball.cpp +++ b/winsource/fball.cpp @@ -350,16 +350,44 @@ void CBallStream::UpdateNamedPipe() } - /*for(DWORD i=0; i 8) { - this->currentFrame[testCursor] = buff[i]; - testCursor += 1; - if(testCursor >= this->currentFrameLen) - this->testCursor = 0; - }*/ + UINT32 *wordArray = (UINT32 *)&rxBuff[cursor]; + UINT32 msgType = wordArray[0]; + UINT32 msgLen = wordArray[1]; + if(rxBuffLen-cursor >= 8+msgLen) + { + char *payload = &this->rxBuff[cursor+8]; + UINT32 payloadLen = msgLen - 8; + UINT32 *payloadArray = (UINT32 *)payload; + + if(msgType == 2) + { + //Message is new frame + for(unsigned i=0; icurrentFrameLen; i++) + { + this->currentFrame[i] = payload[i]; + } + } + + cursor += 8+msgLen; + } + else + { + processing = 0; + } + } + //Store unprocessed data in buffer + if(cursor > 0) + { + memcpy(rxBuff, &rxBuff[cursor], rxBuffLen - cursor); + rxBuffLen -= cursor; + } - rxBuffLen = 0; //Discard buffer } From 380b262723048b1ab27f294e97994103bed383bb Mon Sep 17 00:00:00 2001 From: TimSC Date: Sun, 24 Nov 2013 16:14:56 +0000 Subject: [PATCH 142/256] Update display on received packet --- winsource/fball.cpp | 17 ++++++++++++----- 1 file changed, 12 insertions(+), 5 deletions(-) diff --git a/winsource/fball.cpp b/winsource/fball.cpp index 47c7591..5ec8b49 100644 --- a/winsource/fball.cpp +++ b/winsource/fball.cpp @@ -316,8 +316,16 @@ void CBallStream::UpdateNamedPipe() res = GetOverlappedResult(this->pipeHandle, &this->rxo, &bytesRead, FALSE); - if(res && rxBuffLen > 0) + if(res && bytesRead > 0) { + if(this->currentFrame!=NULL) + for(DWORD i=0; icurrentFrameLen; i++) + { + this->currentFrame[i] = 0x255; + } + + + /* //Merge receive string with buffer if(rxBuff != NULL && rxBuffLen + bytesRead <= rxBuffAlloc) { @@ -349,8 +357,6 @@ void CBallStream::UpdateNamedPipe() } } - - UINT32 cursor = 0; int processing = 1; while(processing && rxBuffLen > 8) @@ -382,12 +388,13 @@ void CBallStream::UpdateNamedPipe() } //Store unprocessed data in buffer - if(cursor > 0) + /*if(cursor > 0) { memcpy(rxBuff, &rxBuff[cursor], rxBuffLen - cursor); rxBuffLen -= cursor; } - + rxBuffLen = 0; + */ } From b15dd2ded1a3414fb2a6f9a4b34517cbe8d5d19a Mon Sep 17 00:00:00 2001 From: TimSC Date: Sun, 24 Nov 2013 16:18:31 +0000 Subject: [PATCH 143/256] Visualise buffer size --- winsource/fball.cpp | 24 +++++++++++++----------- 1 file changed, 13 insertions(+), 11 deletions(-) diff --git a/winsource/fball.cpp b/winsource/fball.cpp index 5ec8b49..5a9229f 100644 --- a/winsource/fball.cpp +++ b/winsource/fball.cpp @@ -318,14 +318,6 @@ void CBallStream::UpdateNamedPipe() if(res && bytesRead > 0) { - if(this->currentFrame!=NULL) - for(DWORD i=0; icurrentFrameLen; i++) - { - this->currentFrame[i] = 0x255; - } - - - /* //Merge receive string with buffer if(rxBuff != NULL && rxBuffLen + bytesRead <= rxBuffAlloc) { @@ -357,6 +349,16 @@ void CBallStream::UpdateNamedPipe() } } + if(this->currentFrame!=NULL) + for(DWORD i=0; icurrentFrameLen; i++) + { + if(i/10 < rxBuffLen) + this->currentFrame[i] = 0x255; + else + this->currentFrame[i] = 0x0; + } + + /* UINT32 cursor = 0; int processing = 1; while(processing && rxBuffLen > 8) @@ -386,15 +388,15 @@ void CBallStream::UpdateNamedPipe() processing = 0; } } - + */ //Store unprocessed data in buffer /*if(cursor > 0) { 
memcpy(rxBuff, &rxBuff[cursor], rxBuffLen - cursor); rxBuffLen -= cursor; - } + }*/ rxBuffLen = 0; - */ + } From 8faf8f95f72d80b0c4960cd1ee952c788166fa8f Mon Sep 17 00:00:00 2001 From: TimSC Date: Sun, 24 Nov 2013 20:29:24 +0000 Subject: [PATCH 144/256] Prevent buffer being deleted while io pending --- winsource/fball.cpp | 177 +++++++++++++++++++++++++++++++++----------- winsource/fball.h | 6 +- 2 files changed, 138 insertions(+), 45 deletions(-) diff --git a/winsource/fball.cpp b/winsource/fball.cpp index 5a9229f..52bed85 100644 --- a/winsource/fball.cpp +++ b/winsource/fball.cpp @@ -2,6 +2,7 @@ #include #include #include +#include #include "fball.h" #pragma warning(disable:4710) // 'function': function not inlined (optimzation) @@ -205,11 +206,16 @@ CBallStream::CBallStream(HRESULT *phr, this->currentFrame = NULL; this->currentFrameLen = 0; this->testCursor = 0; + this->tmpBuff = NULL; this->rxBuff = NULL; this->rxBuffLen = 0; this->rxBuffAlloc = 0; + SYSTEMTIME systime; + GetSystemTime(&systime); + SystemTimeToFileTime(&systime, &this->lastUpdateTime); + } // (Constructor) // @@ -230,6 +236,9 @@ CBallStream::~CBallStream() delete [] this->rxBuff; this->rxBuffLen = 0; this->rxBuffAlloc = 0; + if(this->tmpBuff!=NULL) + delete [] tmpBuff; + } // (Destructor) HRESULT CBallStream::QueryInterface(REFIID riid, void **ppv) @@ -301,25 +310,36 @@ void CBallStream::UpdateNamedPipe() BOOL res = GetOverlappedResult(this->pipeHandle, &txo, &bytesWritten, TRUE);*/ //Receive messages from named pipe - char buff[1000]; + const int tmpBuffLen = 1024*1024; + if(tmpBuff==NULL) + tmpBuff = new char[tmpBuffLen]; DWORD bytesRead = 0; BOOL res = 0; if(HasOverlappedIoCompleted(&this->rxo)) { res = ReadFileEx(this->pipeHandle, - buff, - 1000, + tmpBuff, + tmpBuffLen, &rxo, NULL); } res = GetOverlappedResult(this->pipeHandle, &this->rxo, &bytesRead, FALSE); + if(this->currentFrame!=NULL) + for(DWORD i=0; icurrentFrameLen; i++) + { + if(i%3==0) + this->currentFrame[i] = 0xff; + else + this->currentFrame[i] = 0x0; + } + if(res && bytesRead > 0) { //Merge receive string with buffer - if(rxBuff != NULL && rxBuffLen + bytesRead <= rxBuffAlloc) + /*if(rxBuff != NULL && rxBuffLen + bytesRead <= rxBuffAlloc) { //No need to reallocate memcpy(&rxBuff[rxBuffLen], buff, bytesRead); @@ -330,38 +350,48 @@ void CBallStream::UpdateNamedPipe() //Buffer must be resized if(rxBuff != NULL) { - char *tmp = new char[rxBuffLen + bytesRead]; - memcpy(tmp, rxBuff, rxBuffLen); - memcpy(&tmp[rxBuffLen], buff, bytesRead); - delete [] rxBuff; - - rxBuff = tmp; - rxBuffLen = rxBuffLen + bytesRead; - rxBuffAlloc = rxBuffLen + bytesRead; + char *tmp = new (std::nothrow) char[rxBuffLen + bytesRead]; + if(tmp!=NULL) + { + memcpy(tmp, rxBuff, rxBuffLen); + memcpy(&tmp[rxBuffLen], buff, bytesRead); + delete [] rxBuff; + + rxBuff = tmp; + rxBuffLen = rxBuffLen + bytesRead; + rxBuffAlloc = rxBuffLen + bytesRead; + } + else + { + return; + } } else { - rxBuff = new char[bytesRead]; + rxBuff = new (std::nothrow) char[bytesRead]; + if(rxBuff == NULL) + { + return; + } memcpy(rxBuff, buff, bytesRead); rxBuffLen = bytesRead; rxBuffAlloc = bytesRead; } - } + }*/ if(this->currentFrame!=NULL) for(DWORD i=0; icurrentFrameLen; i++) { - if(i/10 < rxBuffLen) - this->currentFrame[i] = 0x255; + if(i%3==2) + this->currentFrame[i] = 0xff; else - this->currentFrame[i] = 0x0; + this->currentFrame[i] = 0x00; } - /* - UINT32 cursor = 0; + /*UINT32 cursor = 0; int processing = 1; - while(processing && rxBuffLen > 8) + while(processing && (rxBuffLen - cursor) > 8 && 
rxBuff != NULL) { UINT32 *wordArray = (UINT32 *)&rxBuff[cursor]; UINT32 msgType = wordArray[0]; @@ -390,17 +420,24 @@ void CBallStream::UpdateNamedPipe() } */ //Store unprocessed data in buffer - /*if(cursor > 0) + /*if(cursor > 0 && rxBuff != NULL) { - memcpy(rxBuff, &rxBuff[cursor], rxBuffLen - cursor); - rxBuffLen -= cursor; + char *tmp = new (std::nothrow) char[rxBuffLen - cursor]; + if(tmp==NULL) + { + rxBuffLen = 0; + return; + } + memcpy(tmp, &rxBuff[cursor], rxBuffLen - cursor); + delete [] rxBuff; + rxBuff = tmp; + + rxBuffAlloc = rxBuffLen - cursor; + rxBuffLen = rxBuffLen - cursor; }*/ rxBuffLen = 0; } - - - } /*if(this->currentFrame != NULL) @@ -411,6 +448,7 @@ void CBallStream::UpdateNamedPipe() } }*/ + } @@ -448,6 +486,36 @@ void CBallStream::SendStatusViaNamedPipe(UINT32 width, UINT32 height, UINT32 buf } +void CBallStream::SendErrorViaNamedPipe(UINT32 errCode) +{ + if(this->pipeHandle != INVALID_HANDLE_VALUE) + { + /*for(DWORD i=0; icurrentFrameLen; i++) + { + this->currentFrame[i] = 0x255; + }*/ + + //Transmit test message using named pipe + DWORD bytesWritten = 0; + const int buffLen = 4*3; + char test[buffLen]; + UINT32 *pMsgType = (UINT32 *)&test[0]; + *pMsgType = 1; + UINT32 *pMsgLen = (UINT32 *)&test[4]; + *pMsgLen = 4; + UINT32 *pError = (UINT32 *)&test[8]; + *pError = errCode; + + if(HasOverlappedIoCompleted(&this->txo)) + { + BOOL res = WriteFileEx(this->pipeHandle, test, buffLen, &this->txo, NULL); + } + + BOOL res = GetOverlappedResult(this->pipeHandle, &txo, &bytesWritten, TRUE); + } + +} + // // FillBuffer @@ -481,30 +549,51 @@ HRESULT CBallStream::FillBuffer(IMediaSample *pms) this->currentFrameLen = 0; }*/ - this->UpdateNamedPipe(); - this->SendStatusViaNamedPipe(width, height, lDataLen); - - if(this->currentFrame == NULL) + //Calculate time since last frame update + SYSTEMTIME systime; + GetSystemTime(&systime); + FILETIME fiTime; + SystemTimeToFileTime(&systime, &fiTime); + LARGE_INTEGER fiTimeNum; + fiTimeNum.HighPart = fiTime.dwHighDateTime; + fiTimeNum.LowPart = fiTime.dwLowDateTime; + LARGE_INTEGER lastUpdate; + lastUpdate.HighPart = lastUpdateTime.dwHighDateTime; + lastUpdate.LowPart = lastUpdateTime.dwLowDateTime; + + LARGE_INTEGER elapse; + elapse.QuadPart = fiTimeNum.QuadPart - lastUpdate.QuadPart; + float elapseMs = elapse.LowPart / 10000.f; + + if(elapseMs > 100.) 
{ - this->currentFrame = new BYTE[lDataLen]; - this->currentFrameLen = lDataLen; - - long cursor = 0; - for(LONG y=0; y < height; y++) - for(LONG x=0; x < width; x++) + this->UpdateNamedPipe(); + this->SendStatusViaNamedPipe(width, height, lDataLen); + + if(this->currentFrame == NULL) { - if(cursor > this->currentFrameLen) continue; + this->currentFrame = new BYTE[lDataLen]; + this->currentFrameLen = lDataLen; + + long cursor = 0; + for(LONG y=0; y < height; y++) + for(LONG x=0; x < width; x++) + { + if(cursor > this->currentFrameLen) continue; - this->currentFrame[cursor] = x % 255; //Blue - this->currentFrame[cursor+1] = y % 255; //Green - this->currentFrame[cursor+2] = rand(); //Red + this->currentFrame[cursor] = x % 255; //Blue + this->currentFrame[cursor+1] = y % 255; //Green + this->currentFrame[cursor+2] = rand(); //Red - cursor += 3; + cursor += 3; + } } - } - if(this->currentFrame != NULL) - memcpy(pData, this->currentFrame, lDataLen); + if(this->currentFrame != NULL) + memcpy(pData, this->currentFrame, lDataLen); + + lastUpdateTime=fiTime; + } /*for(LONG i=0;iAddRef(); } \ @@ -116,6 +117,9 @@ class CBallStream : public CSourceStream, public IAMStreamConfig, public IKsProp BYTE *currentFrame; LONG currentFrameLen; int testCursor; + char *tmpBuff; + + FILETIME lastUpdateTime; // set up the palette appropriately //enum Colour {Red, Blue, Green, Yellow}; From 3ccc6aa507251e2af1e67735c5e93fa2f191bd51 Mon Sep 17 00:00:00 2001 From: TimSC Date: Sun, 24 Nov 2013 21:12:42 +0000 Subject: [PATCH 145/256] Decoding messages ok --- winsource/fball.cpp | 25 +++++++++++++++++-------- 1 file changed, 17 insertions(+), 8 deletions(-) diff --git a/winsource/fball.cpp b/winsource/fball.cpp index 52bed85..097c3af 100644 --- a/winsource/fball.cpp +++ b/winsource/fball.cpp @@ -339,10 +339,10 @@ void CBallStream::UpdateNamedPipe() if(res && bytesRead > 0) { //Merge receive string with buffer - /*if(rxBuff != NULL && rxBuffLen + bytesRead <= rxBuffAlloc) + if(rxBuff != NULL && rxBuffLen + bytesRead <= rxBuffAlloc) { //No need to reallocate - memcpy(&rxBuff[rxBuffLen], buff, bytesRead); + memcpy(&rxBuff[rxBuffLen], tmpBuff, bytesRead); rxBuffLen += bytesRead; } else @@ -354,7 +354,7 @@ void CBallStream::UpdateNamedPipe() if(tmp!=NULL) { memcpy(tmp, rxBuff, rxBuffLen); - memcpy(&tmp[rxBuffLen], buff, bytesRead); + memcpy(&tmp[rxBuffLen], tmpBuff, bytesRead); delete [] rxBuff; rxBuff = tmp; @@ -373,12 +373,12 @@ void CBallStream::UpdateNamedPipe() { return; } - memcpy(rxBuff, buff, bytesRead); + memcpy(rxBuff, tmpBuff, bytesRead); rxBuffLen = bytesRead; rxBuffAlloc = bytesRead; } - }*/ + } if(this->currentFrame!=NULL) for(DWORD i=0; icurrentFrameLen; i++) @@ -389,7 +389,7 @@ void CBallStream::UpdateNamedPipe() this->currentFrame[i] = 0x00; } - /*UINT32 cursor = 0; + UINT32 cursor = 0; int processing = 1; while(processing && (rxBuffLen - cursor) > 8 && rxBuff != NULL) { @@ -405,9 +405,18 @@ void CBallStream::UpdateNamedPipe() if(msgType == 2) { //Message is new frame - for(unsigned i=0; icurrentFrameLen; i++) + /*for(unsigned i=0; icurrentFrameLen; i++) { this->currentFrame[i] = payload[i]; + }*/ + + if(this->currentFrame!=NULL) + for(DWORD i=0; icurrentFrameLen; i++) + { + if(i%3==1) + this->currentFrame[i] = 0xff; + else + this->currentFrame[i] = 0x00; } } @@ -418,7 +427,7 @@ void CBallStream::UpdateNamedPipe() processing = 0; } } - */ + //Store unprocessed data in buffer /*if(cursor > 0 && rxBuff != NULL) { From 276af8132e3c12e05e3eae8cfd6cdcb356f5d393 Mon Sep 17 00:00:00 2001 From: TimSC Date: Mon, 25 
Nov 2013 12:15:07 +0000 Subject: [PATCH 146/256] Increase video size --- winsource/fball.cpp | 19 +++++++++++-------- 1 file changed, 11 insertions(+), 8 deletions(-) diff --git a/winsource/fball.cpp b/winsource/fball.cpp index 097c3af..532a4de 100644 --- a/winsource/fball.cpp +++ b/winsource/fball.cpp @@ -404,12 +404,6 @@ void CBallStream::UpdateNamedPipe() if(msgType == 2) { - //Message is new frame - /*for(unsigned i=0; icurrentFrameLen; i++) - { - this->currentFrame[i] = payload[i]; - }*/ - if(this->currentFrame!=NULL) for(DWORD i=0; icurrentFrameLen; i++) { @@ -418,6 +412,15 @@ void CBallStream::UpdateNamedPipe() else this->currentFrame[i] = 0x00; } + + //Message is new frame + if(this->currentFrame!=NULL) + for(unsigned i=0; icurrentFrameLen; i++) + { + this->currentFrame[i] = payload[i]; + } + + } cursor += 8+msgLen; @@ -662,8 +665,8 @@ HRESULT CBallStream::GetMediaType(int iPosition, CMediaType *pmt) pvi->bmiHeader.biCompression = BI_RGB; pvi->bmiHeader.biBitCount = 24; pvi->bmiHeader.biSize = sizeof(BITMAPINFOHEADER); - pvi->bmiHeader.biWidth = 80 * iPosition; - pvi->bmiHeader.biHeight = 60 * iPosition; + pvi->bmiHeader.biWidth = 160 * iPosition; + pvi->bmiHeader.biHeight = 120 * iPosition; pvi->bmiHeader.biPlanes = 1; pvi->bmiHeader.biSizeImage = GetBitmapSize(&pvi->bmiHeader); pvi->bmiHeader.biClrImportant = 0; From a05bea60e54fc6d1427979da23479f1848972107 Mon Sep 17 00:00:00 2001 From: TimSC Date: Mon, 25 Nov 2013 12:44:46 +0000 Subject: [PATCH 147/256] Put named pipe tx and rx on separate timers --- winsource/fball.cpp | 38 ++++++++++++++++++++++++++------------ winsource/fball.h | 4 +++- 2 files changed, 29 insertions(+), 13 deletions(-) diff --git a/winsource/fball.cpp b/winsource/fball.cpp index 532a4de..651397e 100644 --- a/winsource/fball.cpp +++ b/winsource/fball.cpp @@ -214,7 +214,8 @@ CBallStream::CBallStream(HRESULT *phr, SYSTEMTIME systime; GetSystemTime(&systime); - SystemTimeToFileTime(&systime, &this->lastUpdateTime); + SystemTimeToFileTime(&systime, &this->lastRxUpdateTime); + SystemTimeToFileTime(&systime, &this->lastTxUpdateTime); } // (Constructor) @@ -380,14 +381,14 @@ void CBallStream::UpdateNamedPipe() } } - if(this->currentFrame!=NULL) + /*if(this->currentFrame!=NULL) for(DWORD i=0; icurrentFrameLen; i++) { if(i%3==2) this->currentFrame[i] = 0xff; else this->currentFrame[i] = 0x00; - } + }*/ UINT32 cursor = 0; int processing = 1; @@ -569,18 +570,25 @@ HRESULT CBallStream::FillBuffer(IMediaSample *pms) LARGE_INTEGER fiTimeNum; fiTimeNum.HighPart = fiTime.dwHighDateTime; fiTimeNum.LowPart = fiTime.dwLowDateTime; - LARGE_INTEGER lastUpdate; - lastUpdate.HighPart = lastUpdateTime.dwHighDateTime; - lastUpdate.LowPart = lastUpdateTime.dwLowDateTime; - LARGE_INTEGER elapse; - elapse.QuadPart = fiTimeNum.QuadPart - lastUpdate.QuadPart; - float elapseMs = elapse.LowPart / 10000.f; + LARGE_INTEGER lastRxUpdate; + lastRxUpdate.HighPart = this->lastRxUpdateTime.dwHighDateTime; + lastRxUpdate.LowPart = this->lastRxUpdateTime.dwLowDateTime; + LARGE_INTEGER lastTxUpdate; + lastTxUpdate.HighPart = this->lastTxUpdateTime.dwHighDateTime; + lastTxUpdate.LowPart = this->lastTxUpdateTime.dwLowDateTime; + + LARGE_INTEGER elapseRx; + elapseRx.QuadPart = fiTimeNum.QuadPart - lastRxUpdate.QuadPart; + float elapseRxMs = elapseRx.LowPart / 10000.f; - if(elapseMs > 100.) + LARGE_INTEGER elapseTx; + elapseTx.QuadPart = fiTimeNum.QuadPart - lastTxUpdate.QuadPart; + float elapseTxMs = elapseTx.LowPart / 10000.f; + + if(elapseRxMs > 50.) 
{ this->UpdateNamedPipe(); - this->SendStatusViaNamedPipe(width, height, lDataLen); if(this->currentFrame == NULL) { @@ -604,7 +612,13 @@ HRESULT CBallStream::FillBuffer(IMediaSample *pms) if(this->currentFrame != NULL) memcpy(pData, this->currentFrame, lDataLen); - lastUpdateTime=fiTime; + this->lastRxUpdateTime=fiTime; + } + + if(elapseTxMs > 100.) + { + this->SendStatusViaNamedPipe(width, height, lDataLen); + this->lastTxUpdateTime=fiTime; } /*for(LONG i=0;i Date: Mon, 25 Nov 2013 12:48:46 +0000 Subject: [PATCH 148/256] Keep unprocessed buffer data available --- winsource/fball.cpp | 23 +++++++++++++---------- winsource/fball.h | 2 +- 2 files changed, 14 insertions(+), 11 deletions(-) diff --git a/winsource/fball.cpp b/winsource/fball.cpp index 651397e..1d0c365 100644 --- a/winsource/fball.cpp +++ b/winsource/fball.cpp @@ -256,7 +256,7 @@ HRESULT CBallStream::QueryInterface(REFIID riid, void **ppv) return S_OK; } -void CBallStream::UpdateNamedPipe() +int CBallStream::UpdateNamedPipe() { /*if(this->currentFrame!=NULL) { @@ -267,6 +267,8 @@ void CBallStream::UpdateNamedPipe() this->currentFrame = NULL;*/ + int frameChanged = 0; + if(this->pipeHandle == INVALID_HANDLE_VALUE) { LPCTSTR n = L"\\\\.\\pipe\\testpipe"; @@ -364,7 +366,7 @@ void CBallStream::UpdateNamedPipe() } else { - return; + return -1; } } else @@ -372,7 +374,7 @@ void CBallStream::UpdateNamedPipe() rxBuff = new (std::nothrow) char[bytesRead]; if(rxBuff == NULL) { - return; + return - 1; } memcpy(rxBuff, tmpBuff, bytesRead); @@ -405,14 +407,14 @@ void CBallStream::UpdateNamedPipe() if(msgType == 2) { - if(this->currentFrame!=NULL) + /*if(this->currentFrame!=NULL) for(DWORD i=0; icurrentFrameLen; i++) { if(i%3==1) this->currentFrame[i] = 0xff; else this->currentFrame[i] = 0x00; - } + }*/ //Message is new frame if(this->currentFrame!=NULL) @@ -421,6 +423,7 @@ void CBallStream::UpdateNamedPipe() this->currentFrame[i] = payload[i]; } + frameChanged = 1; } @@ -433,13 +436,13 @@ void CBallStream::UpdateNamedPipe() } //Store unprocessed data in buffer - /*if(cursor > 0 && rxBuff != NULL) + if(cursor > 0 && rxBuff != NULL) { char *tmp = new (std::nothrow) char[rxBuffLen - cursor]; if(tmp==NULL) { rxBuffLen = 0; - return; + return -1; } memcpy(tmp, &rxBuff[cursor], rxBuffLen - cursor); delete [] rxBuff; @@ -447,8 +450,8 @@ void CBallStream::UpdateNamedPipe() rxBuffAlloc = rxBuffLen - cursor; rxBuffLen = rxBuffLen - cursor; - }*/ - rxBuffLen = 0; + } + //rxBuffLen = 0; } } @@ -460,7 +463,7 @@ void CBallStream::UpdateNamedPipe() this->currentFrame[i] = rand(); } }*/ - + return frameChanged; } diff --git a/winsource/fball.h b/winsource/fball.h index 0233e38..cc27697 100644 --- a/winsource/fball.h +++ b/winsource/fball.h @@ -71,7 +71,7 @@ class CBallStream : public CSourceStream, public IAMStreamConfig, public IKsProp STDMETHODIMP Notify(IBaseFilter * pSender, Quality q); DWORD ThreadProc(); - void UpdateNamedPipe(); + int UpdateNamedPipe(); void SendStatusViaNamedPipe(UINT32 width, UINT32 height, UINT32 bufflen); void SendErrorViaNamedPipe(UINT32 errCode); From 89d28461d86aab02c5057d6a7588fdf24dd5ba7d Mon Sep 17 00:00:00 2001 From: TimSC Date: Mon, 25 Nov 2013 13:18:45 +0000 Subject: [PATCH 149/256] Reuse rx buffer --- winsource/fball.cpp | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/winsource/fball.cpp b/winsource/fball.cpp index 1d0c365..cc21830 100644 --- a/winsource/fball.cpp +++ b/winsource/fball.cpp @@ -438,7 +438,7 @@ int CBallStream::UpdateNamedPipe() //Store unprocessed data in buffer if(cursor > 0 && 
rxBuff != NULL) { - char *tmp = new (std::nothrow) char[rxBuffLen - cursor]; + /*char *tmp = new (std::nothrow) char[rxBuffLen - cursor]; if(tmp==NULL) { rxBuffLen = 0; @@ -447,8 +447,9 @@ int CBallStream::UpdateNamedPipe() memcpy(tmp, &rxBuff[cursor], rxBuffLen - cursor); delete [] rxBuff; rxBuff = tmp; - rxBuffAlloc = rxBuffLen - cursor; + */ + memcpy(rxBuff, &rxBuff[cursor], rxBuffLen - cursor); rxBuffLen = rxBuffLen - cursor; } //rxBuffLen = 0; From cf0523489b57ab7894420537eafd78582d4d199e Mon Sep 17 00:00:00 2001 From: TimSC Date: Mon, 25 Nov 2013 13:46:44 +0000 Subject: [PATCH 150/256] Some code disabled to try to find performance problem --- winsource/fball.cpp | 124 ++++++++++++++++++++++++-------------------- winsource/fball.h | 4 +- 2 files changed, 70 insertions(+), 58 deletions(-) diff --git a/winsource/fball.cpp b/winsource/fball.cpp index cc21830..0405d76 100644 --- a/winsource/fball.cpp +++ b/winsource/fball.cpp @@ -211,6 +211,7 @@ CBallStream::CBallStream(HRESULT *phr, this->rxBuff = NULL; this->rxBuffLen = 0; this->rxBuffAlloc = 0; + this->fillBufferCount = 0; SYSTEMTIME systime; GetSystemTime(&systime); @@ -256,19 +257,8 @@ HRESULT CBallStream::QueryInterface(REFIID riid, void **ppv) return S_OK; } -int CBallStream::UpdateNamedPipe() +int CBallStream::EstablishPipeConnection() { - /*if(this->currentFrame!=NULL) - { - delete [] this->currentFrame; - this->currentFrame = NULL; - this->currentFrameLen = 0; - } - - this->currentFrame = NULL;*/ - - int frameChanged = 0; - if(this->pipeHandle == INVALID_HANDLE_VALUE) { LPCTSTR n = L"\\\\.\\pipe\\testpipe"; @@ -282,6 +272,25 @@ int CBallStream::UpdateNamedPipe() NULL); } + return this->pipeHandle != INVALID_HANDLE_VALUE; +} + +int CBallStream::ReceiveDataViaNamedPipe() +{ + + this->EstablishPipeConnection(); + + /*if(this->currentFrame!=NULL) + { + delete [] this->currentFrame; + this->currentFrame = NULL; + this->currentFrameLen = 0; + } + + this->currentFrame = NULL;*/ + + int frameChanged = 0; + if(this->pipeHandle == INVALID_HANDLE_VALUE) { for(DWORD i=0; icurrentFrameLen; i++) @@ -330,7 +339,7 @@ int CBallStream::UpdateNamedPipe() res = GetOverlappedResult(this->pipeHandle, &this->rxo, &bytesRead, FALSE); - if(this->currentFrame!=NULL) + /*if(this->currentFrame!=NULL) for(DWORD i=0; icurrentFrameLen; i++) { if(i%3==0) @@ -381,7 +390,7 @@ int CBallStream::UpdateNamedPipe() rxBuffLen = bytesRead; rxBuffAlloc = bytesRead; } - } + }*/ /*if(this->currentFrame!=NULL) for(DWORD i=0; icurrentFrameLen; i++) @@ -392,7 +401,8 @@ int CBallStream::UpdateNamedPipe() this->currentFrame[i] = 0x00; }*/ - UINT32 cursor = 0; + //Split receive buffer into separate messages + /*UINT32 cursor = 0; int processing = 1; while(processing && (rxBuffLen - cursor) > 8 && rxBuff != NULL) { @@ -407,14 +417,14 @@ int CBallStream::UpdateNamedPipe() if(msgType == 2) { - /*if(this->currentFrame!=NULL) - for(DWORD i=0; icurrentFrameLen; i++) - { - if(i%3==1) - this->currentFrame[i] = 0xff; - else - this->currentFrame[i] = 0x00; - }*/ + //if(this->currentFrame!=NULL) + //for(DWORD i=0; icurrentFrameLen; i++) + //{ + // if(i%3==1) + // this->currentFrame[i] = 0xff; + // else + // this->currentFrame[i] = 0x00; + //} //Message is new frame if(this->currentFrame!=NULL) @@ -433,28 +443,17 @@ int CBallStream::UpdateNamedPipe() { processing = 0; } - } + }*/ //Store unprocessed data in buffer - if(cursor > 0 && rxBuff != NULL) + /*if(cursor > 0 && rxBuff != NULL) { - /*char *tmp = new (std::nothrow) char[rxBuffLen - cursor]; - if(tmp==NULL) - { - rxBuffLen = 0; - 
return -1; - } - memcpy(tmp, &rxBuff[cursor], rxBuffLen - cursor); - delete [] rxBuff; - rxBuff = tmp; - rxBuffAlloc = rxBuffLen - cursor; - */ memcpy(rxBuff, &rxBuff[cursor], rxBuffLen - cursor); rxBuffLen = rxBuffLen - cursor; } - //rxBuffLen = 0; + rxBuffLen = 0; - } + }*/ } /*if(this->currentFrame != NULL) @@ -471,6 +470,8 @@ int CBallStream::UpdateNamedPipe() void CBallStream::SendStatusViaNamedPipe(UINT32 width, UINT32 height, UINT32 bufflen) { + this->EstablishPipeConnection(); + if(this->pipeHandle != INVALID_HANDLE_VALUE) { /*for(DWORD i=0; icurrentFrameLen; i++) @@ -505,6 +506,8 @@ void CBallStream::SendStatusViaNamedPipe(UINT32 width, UINT32 height, UINT32 buf void CBallStream::SendErrorViaNamedPipe(UINT32 errCode) { + this->EstablishPipeConnection(); + if(this->pipeHandle != INVALID_HANDLE_VALUE) { /*for(DWORD i=0; icurrentFrameLen; i++) @@ -541,6 +544,7 @@ void CBallStream::SendErrorViaNamedPipe(UINT32 errCode) // HRESULT CBallStream::FillBuffer(IMediaSample *pms) { + this->fillBufferCount ++; REFERENCE_TIME rtNow; VIDEOINFOHEADER *pvi = (VIDEOINFOHEADER *)(m_mt.Format()); @@ -590,36 +594,42 @@ HRESULT CBallStream::FillBuffer(IMediaSample *pms) elapseTx.QuadPart = fiTimeNum.QuadPart - lastTxUpdate.QuadPart; float elapseTxMs = elapseTx.LowPart / 10000.f; - if(elapseRxMs > 50.) + int frameChanged = 0; + if(this->currentFrame == NULL) { - this->UpdateNamedPipe(); - - if(this->currentFrame == NULL) - { - this->currentFrame = new BYTE[lDataLen]; - this->currentFrameLen = lDataLen; + this->currentFrame = new BYTE[lDataLen]; + this->currentFrameLen = lDataLen; - long cursor = 0; - for(LONG y=0; y < height; y++) - for(LONG x=0; x < width; x++) - { - if(cursor > this->currentFrameLen) continue; + long cursor = 0; + for(LONG y=0; y < height; y++) + for(LONG x=0; x < width; x++) + { + if(cursor > this->currentFrameLen) continue; - this->currentFrame[cursor] = x % 255; //Blue - this->currentFrame[cursor+1] = y % 255; //Green - this->currentFrame[cursor+2] = rand(); //Red + this->currentFrame[cursor] = x % 255; //Blue + this->currentFrame[cursor+1] = y % 255; //Green + this->currentFrame[cursor+2] = rand(); //Red - cursor += 3; - } + cursor += 3; } + frameChanged = 1; + } - if(this->currentFrame != NULL) + if(elapseRxMs > 10.) + { + int ret = this->ReceiveDataViaNamedPipe(); + if(ret) frameChanged = ret; + + if(this->currentFrame != NULL && frameChanged) memcpy(pData, this->currentFrame, lDataLen); this->lastRxUpdateTime=fiTime; } - if(elapseTxMs > 100.) + //if(this->currentFrame != NULL && frameChanged) + // memcpy(pData, this->currentFrame, lDataLen); + + if(elapseTxMs > 10.) 
{ this->SendStatusViaNamedPipe(width, height, lDataLen); this->lastTxUpdateTime=fiTime; diff --git a/winsource/fball.h b/winsource/fball.h index cc27697..6dc5d4c 100644 --- a/winsource/fball.h +++ b/winsource/fball.h @@ -71,7 +71,8 @@ class CBallStream : public CSourceStream, public IAMStreamConfig, public IKsProp STDMETHODIMP Notify(IBaseFilter * pSender, Quality q); DWORD ThreadProc(); - int UpdateNamedPipe(); + int EstablishPipeConnection(); + int ReceiveDataViaNamedPipe(); void SendStatusViaNamedPipe(UINT32 width, UINT32 height, UINT32 bufflen); void SendErrorViaNamedPipe(UINT32 errCode); @@ -118,6 +119,7 @@ class CBallStream : public CSourceStream, public IAMStreamConfig, public IKsProp LONG currentFrameLen; int testCursor; char *tmpBuff; + int fillBufferCount; FILETIME lastTxUpdateTime; FILETIME lastRxUpdateTime; From 661140486feec7498045f904e34bda9a152900bb Mon Sep 17 00:00:00 2001 From: TimSC Date: Mon, 25 Nov 2013 13:52:49 +0000 Subject: [PATCH 151/256] Fill buffer every frame --- winsource/fball.cpp | 23 ++++++++++++----------- 1 file changed, 12 insertions(+), 11 deletions(-) diff --git a/winsource/fball.cpp b/winsource/fball.cpp index 0405d76..49f1d70 100644 --- a/winsource/fball.cpp +++ b/winsource/fball.cpp @@ -346,7 +346,7 @@ int CBallStream::ReceiveDataViaNamedPipe() this->currentFrame[i] = 0xff; else this->currentFrame[i] = 0x0; - } + }*/ if(res && bytesRead > 0) { @@ -390,7 +390,7 @@ int CBallStream::ReceiveDataViaNamedPipe() rxBuffLen = bytesRead; rxBuffAlloc = bytesRead; } - }*/ + } /*if(this->currentFrame!=NULL) for(DWORD i=0; icurrentFrameLen; i++) @@ -402,9 +402,9 @@ int CBallStream::ReceiveDataViaNamedPipe() }*/ //Split receive buffer into separate messages - /*UINT32 cursor = 0; + UINT32 cursor = 0; int processing = 1; - while(processing && (rxBuffLen - cursor) > 8 && rxBuff != NULL) + /*while(processing && (rxBuffLen - cursor) > 8 && rxBuff != NULL) { UINT32 *wordArray = (UINT32 *)&rxBuff[cursor]; UINT32 msgType = wordArray[0]; @@ -444,16 +444,17 @@ int CBallStream::ReceiveDataViaNamedPipe() processing = 0; } }*/ + cursor = rxBuffLen; //Store unprocessed data in buffer - /*if(cursor > 0 && rxBuff != NULL) + if(cursor > 0 && rxBuff != NULL) { memcpy(rxBuff, &rxBuff[cursor], rxBuffLen - cursor); rxBuffLen = rxBuffLen - cursor; } - rxBuffLen = 0; + //rxBuffLen = 0; - }*/ + } } /*if(this->currentFrame != NULL) @@ -620,14 +621,14 @@ HRESULT CBallStream::FillBuffer(IMediaSample *pms) int ret = this->ReceiveDataViaNamedPipe(); if(ret) frameChanged = ret; - if(this->currentFrame != NULL && frameChanged) - memcpy(pData, this->currentFrame, lDataLen); + //if(this->currentFrame != NULL && frameChanged) + // memcpy(pData, this->currentFrame, lDataLen); this->lastRxUpdateTime=fiTime; } - //if(this->currentFrame != NULL && frameChanged) - // memcpy(pData, this->currentFrame, lDataLen); + if(this->currentFrame != NULL) + memcpy(pData, this->currentFrame, lDataLen); if(elapseTxMs > 10.) 
{ From 10298dac0e097894b22b7efdc6587e32f8769907 Mon Sep 17 00:00:00 2001 From: TimSC Date: Mon, 25 Nov 2013 14:04:17 +0000 Subject: [PATCH 152/256] Trying to optimise frame rate --- winsource/fball.cpp | 33 ++++++++++++++++----------------- 1 file changed, 16 insertions(+), 17 deletions(-) diff --git a/winsource/fball.cpp b/winsource/fball.cpp index 49f1d70..517c378 100644 --- a/winsource/fball.cpp +++ b/winsource/fball.cpp @@ -404,7 +404,7 @@ int CBallStream::ReceiveDataViaNamedPipe() //Split receive buffer into separate messages UINT32 cursor = 0; int processing = 1; - /*while(processing && (rxBuffLen - cursor) > 8 && rxBuff != NULL) + while(processing && (rxBuffLen - cursor) > 8 && rxBuff != NULL) { UINT32 *wordArray = (UINT32 *)&rxBuff[cursor]; UINT32 msgType = wordArray[0]; @@ -443,8 +443,7 @@ int CBallStream::ReceiveDataViaNamedPipe() { processing = 0; } - }*/ - cursor = rxBuffLen; + } //Store unprocessed data in buffer if(cursor > 0 && rxBuff != NULL) @@ -546,19 +545,11 @@ void CBallStream::SendErrorViaNamedPipe(UINT32 errCode) HRESULT CBallStream::FillBuffer(IMediaSample *pms) { this->fillBufferCount ++; - REFERENCE_TIME rtNow; - - VIDEOINFOHEADER *pvi = (VIDEOINFOHEADER *)(m_mt.Format()); - REFERENCE_TIME avgFrameTime = ((VIDEOINFOHEADER*)m_mt.pbFormat)->AvgTimePerFrame; + VIDEOINFOHEADER *pvi = (VIDEOINFOHEADER *)(m_mt.Format()); LONG width = pvi->bmiHeader.biWidth; LONG height = pvi->bmiHeader.biHeight; - - rtNow = m_rtLastTime; - m_rtLastTime += avgFrameTime; - pms->SetTime(&rtNow, &m_rtLastTime); - pms->SetSyncPoint(TRUE); - + BYTE *pData; long lDataLen; pms->GetPointer(&pData); @@ -596,6 +587,7 @@ HRESULT CBallStream::FillBuffer(IMediaSample *pms) float elapseTxMs = elapseTx.LowPart / 10000.f; int frameChanged = 0; + //Initialise test frame if(this->currentFrame == NULL) { this->currentFrame = new BYTE[lDataLen]; @@ -621,14 +613,21 @@ HRESULT CBallStream::FillBuffer(IMediaSample *pms) int ret = this->ReceiveDataViaNamedPipe(); if(ret) frameChanged = ret; - //if(this->currentFrame != NULL && frameChanged) - // memcpy(pData, this->currentFrame, lDataLen); - this->lastRxUpdateTime=fiTime; } - if(this->currentFrame != NULL) + if(this->currentFrame != NULL && frameChanged) + { + REFERENCE_TIME rtNow; + REFERENCE_TIME avgFrameTime = ((VIDEOINFOHEADER*)m_mt.pbFormat)->AvgTimePerFrame; + + rtNow = m_rtLastTime; + m_rtLastTime += avgFrameTime; + pms->SetTime(&rtNow, &m_rtLastTime); + pms->SetSyncPoint(TRUE); + memcpy(pData, this->currentFrame, lDataLen); + } if(elapseTxMs > 10.) { From b569d546bbbda6e159ae4d3a42c50e5c799aa343 Mon Sep 17 00:00:00 2001 From: TimSC Date: Mon, 25 Nov 2013 14:19:26 +0000 Subject: [PATCH 153/256] Found slow code --- winsource/fball.cpp | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/winsource/fball.cpp b/winsource/fball.cpp index 517c378..e176d1b 100644 --- a/winsource/fball.cpp +++ b/winsource/fball.cpp @@ -618,13 +618,15 @@ HRESULT CBallStream::FillBuffer(IMediaSample *pms) if(this->currentFrame != NULL && frameChanged) { - REFERENCE_TIME rtNow; + /* REFERENCE_TIME rtNow; REFERENCE_TIME avgFrameTime = ((VIDEOINFOHEADER*)m_mt.pbFormat)->AvgTimePerFrame; + //This is the slow code!? 
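		// [Editor's aside, not part of the patch] SetTime()/SetSyncPoint() only store
		// values on the sample, so the calls themselves are cheap; the stall more
		// likely comes from the renderer pacing presentation once timestamps are set,
		// holding each sample back for AvgTimePerFrame. A minimal sketch of the usual
		// stamping pattern, driven by the frame counter rather than the accumulated
		// m_rtLastTime (assumes fillBufferCount counts frames delivered so far):
		//
		//	REFERENCE_TIME avg   = ((VIDEOINFOHEADER*)m_mt.pbFormat)->AvgTimePerFrame;
		//	REFERENCE_TIME start = this->fillBufferCount * avg;
		//	REFERENCE_TIME stop  = start + avg;
		//	pms->SetTime(&start, &stop);
		//	pms->SetSyncPoint(TRUE);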
rtNow = m_rtLastTime; m_rtLastTime += avgFrameTime; pms->SetTime(&rtNow, &m_rtLastTime); pms->SetSyncPoint(TRUE); + //End of slow?*/ memcpy(pData, this->currentFrame, lDataLen); } From 7e44928420a68e188e379c5d482fc74edbc974ed Mon Sep 17 00:00:00 2001 From: TimSC Date: Mon, 25 Nov 2013 14:52:25 +0000 Subject: [PATCH 154/256] Rename project files --- winsource/{fball.cpp => winsource.cpp} | 2 +- winsource/winsource.def | 12 +++++++++ winsource/{fball.h => winsource.h} | 0 winsource/{ball.rc => winsource.rc} | 0 winsource/{ball.sln => winsource.sln} | 2 +- winsource/{Ball.vcxproj => winsource.vcxproj} | 26 +++++++++---------- 6 files changed, 27 insertions(+), 15 deletions(-) rename winsource/{fball.cpp => winsource.cpp} (99%) create mode 100644 winsource/winsource.def rename winsource/{fball.h => winsource.h} (100%) rename winsource/{ball.rc => winsource.rc} (100%) rename winsource/{ball.sln => winsource.sln} (89%) rename winsource/{Ball.vcxproj => winsource.vcxproj} (92%) diff --git a/winsource/fball.cpp b/winsource/winsource.cpp similarity index 99% rename from winsource/fball.cpp rename to winsource/winsource.cpp index e176d1b..77256df 100644 --- a/winsource/fball.cpp +++ b/winsource/winsource.cpp @@ -3,7 +3,7 @@ #include #include #include -#include "fball.h" +#include "winsource.h" #pragma warning(disable:4710) // 'function': function not inlined (optimzation) diff --git a/winsource/winsource.def b/winsource/winsource.def new file mode 100644 index 0000000..4e713c3 --- /dev/null +++ b/winsource/winsource.def @@ -0,0 +1,12 @@ +;=========================================================================== +; Copyright (c) 1992-2002 Microsoft Corporation. All Rights Reserved. +;=========================================================================== + +LIBRARY Ball.dll + +EXPORTS + DllMain PRIVATE + DllGetClassObject PRIVATE + DllCanUnloadNow PRIVATE + DllRegisterServer PRIVATE + DllUnregisterServer PRIVATE diff --git a/winsource/fball.h b/winsource/winsource.h similarity index 100% rename from winsource/fball.h rename to winsource/winsource.h diff --git a/winsource/ball.rc b/winsource/winsource.rc similarity index 100% rename from winsource/ball.rc rename to winsource/winsource.rc diff --git a/winsource/ball.sln b/winsource/winsource.sln similarity index 89% rename from winsource/ball.sln rename to winsource/winsource.sln index 11590b5..7472e50 100644 --- a/winsource/ball.sln +++ b/winsource/winsource.sln @@ -1,7 +1,7 @@  Microsoft Visual Studio Solution File, Format Version 11.00 # Visual Studio 2010 -Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "Ball", "Ball.vcxproj", "{9D3C9114-5067-45E6-B83D-12D31EF86297}" +Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "winsource", "winsource.vcxproj", "{9D3C9114-5067-45E6-B83D-12D31EF86297}" EndProject Global GlobalSection(SolutionConfigurationPlatforms) = preSolution diff --git a/winsource/Ball.vcxproj b/winsource/winsource.vcxproj similarity index 92% rename from winsource/Ball.vcxproj rename to winsource/winsource.vcxproj index 5119478..142c9e5 100644 --- a/winsource/Ball.vcxproj +++ b/winsource/winsource.vcxproj @@ -20,7 +20,7 @@ {9D3C9114-5067-45E6-B83D-12D31EF86297} - Ball + winsource Win32Proj @@ -91,7 +91,7 @@ Disabled ..\..\BaseClasses\;%(AdditionalIncludeDirectories) - WIN32;_DEBUG;_WINDOWS;_USRDLL;BALL_EXPORTS;%(PreprocessorDefinitions) + WIN32;_DEBUG;_WINDOWS;_USRDLL;WINSOURCE_EXPORTS;%(PreprocessorDefinitions) true EnableFastChecks MultiThreadedDebugDLL @@ -105,7 +105,7 @@ 
strmbasd.lib;winmm.lib;msvcrtd.lib;%(AdditionalDependencies) %(AdditionalLibraryDirectories) true - ball.def + winsource.def true Windows MachineX86 @@ -115,7 +115,7 @@ MaxSpeed ..\..\BaseClasses\;$(DXSDK_DIR)\Include\;%(AdditionalIncludeDirectories) - WIN32;NDEBUG;_WINDOWS;_USRDLL;BALL_EXPORTS;%(PreprocessorDefinitions) + WIN32;NDEBUG;_WINDOWS;_USRDLL;WINSOURCE_EXPORTS;%(PreprocessorDefinitions) MultiThreadedDLL @@ -127,7 +127,7 @@ strmbase.lib;winmm.lib;msvcrt.lib;%(AdditionalDependencies) ..\..\BaseClasses\;%(AdditionalLibraryDirectories) true - ball.def + winsource.def true Windows true @@ -142,7 +142,7 @@ Disabled ..\..\BaseClasses\;%(AdditionalIncludeDirectories) - WIN32;_DEBUG;_WINDOWS;_USRDLL;BALL_EXPORTS;%(PreprocessorDefinitions) + WIN32;_DEBUG;_WINDOWS;_USRDLL;WINSOURCE_EXPORTS;%(PreprocessorDefinitions) true EnableFastChecks MultiThreadedDebugDLL @@ -156,7 +156,7 @@ strmbasd.lib;winmm.lib;msvcrtd.lib;%(AdditionalDependencies) ..\..\BaseClasses\x64\Debug\;%(AdditionalLibraryDirectories) true - ball.def + winsource.def true Windows MachineX64 @@ -169,7 +169,7 @@ MaxSpeed ..\..\BaseClasses\;$(DXSDK_DIR)\Include\;%(AdditionalIncludeDirectories) - WIN32;NDEBUG;_WINDOWS;_USRDLL;BALL_EXPORTS;%(PreprocessorDefinitions) + WIN32;NDEBUG;_WINDOWS;_USRDLL;WINSOURCE_EXPORTS;%(PreprocessorDefinitions) MultiThreadedDLL @@ -181,7 +181,7 @@ strmbase.lib;winmm.lib;msvcrt.lib;%(AdditionalDependencies) ..\..\BaseClasses\x64\release;%(AdditionalLibraryDirectories) true - ball.def + winsource.def true Windows true @@ -190,17 +190,17 @@ - + - + - + - + From a507d6834b43a8ccdee47db0218ab9a03e05c4f4 Mon Sep 17 00:00:00 2001 From: TimSC Date: Mon, 25 Nov 2013 14:55:10 +0000 Subject: [PATCH 155/256] Update source name --- winsource/winsource.cpp | 16 ++++++++-------- winsource/winsource.def | 5 +---- 2 files changed, 9 insertions(+), 12 deletions(-) diff --git a/winsource/winsource.cpp b/winsource/winsource.cpp index 77256df..9974151 100644 --- a/winsource/winsource.cpp +++ b/winsource/winsource.cpp @@ -34,7 +34,7 @@ const AMOVIESETUP_PIN sudOpPin = const AMOVIESETUP_FILTER sudBallax = { &CLSID_BouncingBall, // Filter CLSID - L"Bouncing Ball", // String name + L"Kinatomic Virtual Camera", // String name MERIT_DO_NOT_USE, // Filter merit 1, // Number pins &sudOpPin // Pin details @@ -44,7 +44,7 @@ const AMOVIESETUP_FILTER sudBallax = // COM global table of objects in this dll CFactoryTemplate g_Templates[] = { - { L"Bouncing Ball" + { L"Kinatomic Virtual Camera" , &CLSID_BouncingBall , CBouncingBall::CreateInstance , NULL @@ -76,7 +76,7 @@ STDAPI RegisterFilters( BOOL bRegister ) hr = CoInitialize(0); if(bRegister) { - hr = AMovieSetupRegisterServer(CLSID_BouncingBall, L"Bouncing Ball", achFileName, L"Both", L"InprocServer32"); + hr = AMovieSetupRegisterServer(CLSID_BouncingBall, L"Kinatomic Virtual Camera", achFileName, L"Both", L"InprocServer32"); } if( SUCCEEDED(hr) ) @@ -93,7 +93,7 @@ STDAPI RegisterFilters( BOOL bRegister ) rf2.dwMerit = MERIT_DO_NOT_USE; rf2.cPins = 1; rf2.rgPins = &sudOpPin; - hr = fm->RegisterFilter(CLSID_BouncingBall, L"Bouncing Ball", &pMoniker, &CLSID_VideoInputDeviceCategory, NULL, &rf2); + hr = fm->RegisterFilter(CLSID_BouncingBall, L"Kinatomic Virtual Camera", &pMoniker, &CLSID_VideoInputDeviceCategory, NULL, &rf2); } else { @@ -152,7 +152,7 @@ BOOL APIENTRY DllMain(HANDLE hModule, // // CreateInstance // -// The only allowed way to create Bouncing balls! +// The only allowed way to create instances of stream! 
// CUnknown * WINAPI CBouncingBall::CreateInstance(LPUNKNOWN lpunk, HRESULT *phr) { @@ -177,14 +177,14 @@ HRESULT CBouncingBall::QueryInterface(REFIID riid, void **ppv) // Initialise a CBallStream object so that we have a pin. // CBouncingBall::CBouncingBall(LPUNKNOWN lpunk, HRESULT *phr) : - CSource(NAME("Bouncing ball"), lpunk, CLSID_BouncingBall) + CSource(NAME("Kinatomic Virtual Camera"), lpunk, CLSID_BouncingBall) { ASSERT(phr); CAutoLock cAutoLock(&m_cStateLock); // Create the one and only output pin m_paStreams = (CSourceStream **) new CBallStream*[1]; - m_paStreams[0] = new CBallStream(phr, this, L"Bouncing Ball"); + m_paStreams[0] = new CBallStream(phr, this, L"Kinatomic Virtual Camera"); } // (Constructor) @@ -194,7 +194,7 @@ CBouncingBall::CBouncingBall(LPUNKNOWN lpunk, HRESULT *phr) : CBallStream::CBallStream(HRESULT *phr, CBouncingBall *pParent, LPCWSTR pPinName) : - CSourceStream(NAME("Bouncing Ball"),phr, pParent, pPinName), + CSourceStream(NAME("Kinatomic Virtual Camera"),phr, pParent, pPinName), m_pParent(pParent) { GetMediaType(4, &m_mt); diff --git a/winsource/winsource.def b/winsource/winsource.def index 4e713c3..8d09658 100644 --- a/winsource/winsource.def +++ b/winsource/winsource.def @@ -1,8 +1,5 @@ -;=========================================================================== -; Copyright (c) 1992-2002 Microsoft Corporation. All Rights Reserved. -;=========================================================================== -LIBRARY Ball.dll +LIBRARY winsource.dll EXPORTS DllMain PRIVATE From 69f0fb83d87111c34cd6caa997f7af8a5605b734 Mon Sep 17 00:00:00 2001 From: TimSC Date: Mon, 25 Nov 2013 15:24:26 +0000 Subject: [PATCH 156/256] Connection to named pipe resumed if connection is lost --- winsource/winsource.cpp | 49 ++++++++++++++++++++++++++++++++++++++++- 1 file changed, 48 insertions(+), 1 deletion(-) diff --git a/winsource/winsource.cpp b/winsource/winsource.cpp index 9974151..60ae5a2 100644 --- a/winsource/winsource.cpp +++ b/winsource/winsource.cpp @@ -270,6 +270,12 @@ int CBallStream::EstablishPipeConnection() OPEN_EXISTING, FILE_ATTRIBUTE_NORMAL | FILE_FLAG_OVERLAPPED, NULL); + + if(this->pipeHandle != INVALID_HANDLE_VALUE) + { + memset(&this->rxo, 0x00, sizeof(OVERLAPPED)); + memset(&this->txo, 0x00, sizeof(OVERLAPPED)); + } } return this->pipeHandle != INVALID_HANDLE_VALUE; @@ -337,8 +343,22 @@ int CBallStream::ReceiveDataViaNamedPipe() NULL); } + if(res==0 && GetLastError() == ERROR_BROKEN_PIPE) + { + CloseHandle(this->pipeHandle); + this->pipeHandle = INVALID_HANDLE_VALUE; + return 0; + } + res = GetOverlappedResult(this->pipeHandle, &this->rxo, &bytesRead, FALSE); + if(res==0 && GetLastError() == ERROR_BROKEN_PIPE) + { + CloseHandle(this->pipeHandle); + this->pipeHandle = INVALID_HANDLE_VALUE; + return 0; + } + /*if(this->currentFrame!=NULL) for(DWORD i=0; icurrentFrameLen; i++) { @@ -497,11 +517,24 @@ void CBallStream::SendStatusViaNamedPipe(UINT32 width, UINT32 height, UINT32 buf if(HasOverlappedIoCompleted(&this->txo)) { BOOL res = WriteFileEx(this->pipeHandle, test, buffLen, &this->txo, NULL); + + if(res==0 && GetLastError() == ERROR_BROKEN_PIPE) + { + CloseHandle(this->pipeHandle); + this->pipeHandle = INVALID_HANDLE_VALUE; + return; + } } BOOL res = GetOverlappedResult(this->pipeHandle, &txo, &bytesWritten, TRUE); - } + if(res==0 && GetLastError() == ERROR_BROKEN_PIPE) + { + CloseHandle(this->pipeHandle); + this->pipeHandle = INVALID_HANDLE_VALUE; + return; + } + } } void CBallStream::SendErrorViaNamedPipe(UINT32 errCode) @@ -529,9 +562,23 @@ void 
CBallStream::SendErrorViaNamedPipe(UINT32 errCode) if(HasOverlappedIoCompleted(&this->txo)) { BOOL res = WriteFileEx(this->pipeHandle, test, buffLen, &this->txo, NULL); + + if(res==0 && GetLastError() == ERROR_BROKEN_PIPE) + { + CloseHandle(this->pipeHandle); + this->pipeHandle = INVALID_HANDLE_VALUE; + return; + } } BOOL res = GetOverlappedResult(this->pipeHandle, &txo, &bytesWritten, TRUE); + + if(res==0 && GetLastError() == ERROR_BROKEN_PIPE) + { + CloseHandle(this->pipeHandle); + this->pipeHandle = INVALID_HANDLE_VALUE; + return; + } } } From e8df2a9c8a175108276667842824743eaad69bfc Mon Sep 17 00:00:00 2001 From: TimSC Date: Mon, 25 Nov 2013 15:29:29 +0000 Subject: [PATCH 157/256] Update class names --- winsource/winsource.cpp | 78 ++++++++++++++++++++--------------------- winsource/winsource.h | 30 ++++++++-------- 2 files changed, 52 insertions(+), 56 deletions(-) diff --git a/winsource/winsource.cpp b/winsource/winsource.cpp index 60ae5a2..5052d6b 100644 --- a/winsource/winsource.cpp +++ b/winsource/winsource.cpp @@ -33,7 +33,7 @@ const AMOVIESETUP_PIN sudOpPin = const AMOVIESETUP_FILTER sudBallax = { - &CLSID_BouncingBall, // Filter CLSID + &CLSID_Kinatomic_Camera, // Filter CLSID L"Kinatomic Virtual Camera", // String name MERIT_DO_NOT_USE, // Filter merit 1, // Number pins @@ -45,8 +45,8 @@ const AMOVIESETUP_FILTER sudBallax = CFactoryTemplate g_Templates[] = { { L"Kinatomic Virtual Camera" - , &CLSID_BouncingBall - , CBouncingBall::CreateInstance + , &CLSID_Kinatomic_Camera + , CCameraOutput::CreateInstance , NULL , &sudBallax } }; @@ -76,7 +76,7 @@ STDAPI RegisterFilters( BOOL bRegister ) hr = CoInitialize(0); if(bRegister) { - hr = AMovieSetupRegisterServer(CLSID_BouncingBall, L"Kinatomic Virtual Camera", achFileName, L"Both", L"InprocServer32"); + hr = AMovieSetupRegisterServer(CLSID_Kinatomic_Camera, L"Kinatomic Virtual Camera", achFileName, L"Both", L"InprocServer32"); } if( SUCCEEDED(hr) ) @@ -93,11 +93,11 @@ STDAPI RegisterFilters( BOOL bRegister ) rf2.dwMerit = MERIT_DO_NOT_USE; rf2.cPins = 1; rf2.rgPins = &sudOpPin; - hr = fm->RegisterFilter(CLSID_BouncingBall, L"Kinatomic Virtual Camera", &pMoniker, &CLSID_VideoInputDeviceCategory, NULL, &rf2); + hr = fm->RegisterFilter(CLSID_Kinatomic_Camera, L"Kinatomic Virtual Camera", &pMoniker, &CLSID_VideoInputDeviceCategory, NULL, &rf2); } else { - hr = fm->UnregisterFilter(&CLSID_VideoInputDeviceCategory, 0, CLSID_BouncingBall); + hr = fm->UnregisterFilter(&CLSID_VideoInputDeviceCategory, 0, CLSID_Kinatomic_Camera); } } @@ -108,7 +108,7 @@ STDAPI RegisterFilters( BOOL bRegister ) } if( SUCCEEDED(hr) && !bRegister ) - hr = AMovieSetupUnregisterServer( CLSID_BouncingBall ); + hr = AMovieSetupUnregisterServer( CLSID_Kinatomic_Camera ); CoFreeUnusedLibraries(); CoUninitialize(); @@ -154,15 +154,15 @@ BOOL APIENTRY DllMain(HANDLE hModule, // // The only allowed way to create instances of stream! 
// -CUnknown * WINAPI CBouncingBall::CreateInstance(LPUNKNOWN lpunk, HRESULT *phr) +CUnknown * WINAPI CCameraOutput::CreateInstance(LPUNKNOWN lpunk, HRESULT *phr) { ASSERT(phr); - CUnknown *punk = new CBouncingBall(lpunk, phr); + CUnknown *punk = new CCameraOutput(lpunk, phr); return punk; } // CreateInstance -HRESULT CBouncingBall::QueryInterface(REFIID riid, void **ppv) +HRESULT CCameraOutput::QueryInterface(REFIID riid, void **ppv) { //Forward request for IAMStreamConfig & IKsPropertySet to the pin if(riid == _uuidof(IAMStreamConfig) || riid == _uuidof(IKsPropertySet)) @@ -174,25 +174,25 @@ HRESULT CBouncingBall::QueryInterface(REFIID riid, void **ppv) // // Constructor // -// Initialise a CBallStream object so that we have a pin. +// Initialise a CCameraStream object so that we have a pin. // -CBouncingBall::CBouncingBall(LPUNKNOWN lpunk, HRESULT *phr) : - CSource(NAME("Kinatomic Virtual Camera"), lpunk, CLSID_BouncingBall) +CCameraOutput::CCameraOutput(LPUNKNOWN lpunk, HRESULT *phr) : + CSource(NAME("Kinatomic Virtual Camera"), lpunk, CLSID_Kinatomic_Camera) { ASSERT(phr); CAutoLock cAutoLock(&m_cStateLock); // Create the one and only output pin - m_paStreams = (CSourceStream **) new CBallStream*[1]; - m_paStreams[0] = new CBallStream(phr, this, L"Kinatomic Virtual Camera"); + m_paStreams = (CSourceStream **) new CCameraStream*[1]; + m_paStreams[0] = new CCameraStream(phr, this, L"Kinatomic Virtual Camera"); } // (Constructor) // // Constructor // -CBallStream::CBallStream(HRESULT *phr, - CBouncingBall *pParent, +CCameraStream::CCameraStream(HRESULT *phr, + CCameraOutput *pParent, LPCWSTR pPinName) : CSourceStream(NAME("Kinatomic Virtual Camera"),phr, pParent, pPinName), m_pParent(pParent) @@ -223,7 +223,7 @@ CBallStream::CBallStream(HRESULT *phr, // // Destructor // -CBallStream::~CBallStream() +CCameraStream::~CCameraStream() { if(this->pipeHandle != 0) CloseHandle(this->pipeHandle); @@ -243,7 +243,7 @@ CBallStream::~CBallStream() } // (Destructor) -HRESULT CBallStream::QueryInterface(REFIID riid, void **ppv) +HRESULT CCameraStream::QueryInterface(REFIID riid, void **ppv) { // Standard OLE stuff if(riid == _uuidof(IAMStreamConfig)) @@ -257,7 +257,7 @@ HRESULT CBallStream::QueryInterface(REFIID riid, void **ppv) return S_OK; } -int CBallStream::EstablishPipeConnection() +int CCameraStream::EstablishPipeConnection() { if(this->pipeHandle == INVALID_HANDLE_VALUE) { @@ -281,7 +281,7 @@ int CBallStream::EstablishPipeConnection() return this->pipeHandle != INVALID_HANDLE_VALUE; } -int CBallStream::ReceiveDataViaNamedPipe() +int CCameraStream::ReceiveDataViaNamedPipe() { this->EstablishPipeConnection(); @@ -488,7 +488,7 @@ int CBallStream::ReceiveDataViaNamedPipe() } -void CBallStream::SendStatusViaNamedPipe(UINT32 width, UINT32 height, UINT32 bufflen) +void CCameraStream::SendStatusViaNamedPipe(UINT32 width, UINT32 height, UINT32 bufflen) { this->EstablishPipeConnection(); @@ -537,7 +537,7 @@ void CBallStream::SendStatusViaNamedPipe(UINT32 width, UINT32 height, UINT32 buf } } -void CBallStream::SendErrorViaNamedPipe(UINT32 errCode) +void CCameraStream::SendErrorViaNamedPipe(UINT32 errCode) { this->EstablishPipeConnection(); @@ -587,9 +587,7 @@ void CBallStream::SendErrorViaNamedPipe(UINT32 errCode) // // FillBuffer // -// Plots a ball into the supplied video buffer -// -HRESULT CBallStream::FillBuffer(IMediaSample *pms) +HRESULT CCameraStream::FillBuffer(IMediaSample *pms) { this->fillBufferCount ++; @@ -701,7 +699,7 @@ HRESULT CBallStream::FillBuffer(IMediaSample *pms) // the downstream 
filter (often the renderer). Wind it up or down according // to the flooding level - also skip forward if we are notified of Late-ness // -STDMETHODIMP CBallStream::Notify(IBaseFilter * pSender, Quality q) +STDMETHODIMP CCameraStream::Notify(IBaseFilter * pSender, Quality q) { return E_NOTIMPL; @@ -713,7 +711,7 @@ STDMETHODIMP CBallStream::Notify(IBaseFilter * pSender, Quality q) // // Called when a media type is agreed between filters // -HRESULT CBallStream::SetMediaType(const CMediaType *pMediaType) +HRESULT CCameraStream::SetMediaType(const CMediaType *pMediaType) { DECLARE_PTR(VIDEOINFOHEADER, pvi, pMediaType->Format()); HRESULT hr = CSourceStream::SetMediaType(pMediaType); @@ -725,7 +723,7 @@ HRESULT CBallStream::SetMediaType(const CMediaType *pMediaType) ////////////////////////////////////////////////////////////////////////// // See Directshow help topic for IAMStreamConfig for details on this method -HRESULT CBallStream::GetMediaType(int iPosition, CMediaType *pmt) +HRESULT CCameraStream::GetMediaType(int iPosition, CMediaType *pmt) { if(iPosition < 0) return E_INVALIDARG; if(iPosition > 8) return VFW_S_NO_MORE_ITEMS; @@ -767,7 +765,7 @@ HRESULT CBallStream::GetMediaType(int iPosition, CMediaType *pmt) } // GetMediaType // This method is called to see if a given output format is supported -HRESULT CBallStream::CheckMediaType(const CMediaType *pMediaType) +HRESULT CCameraStream::CheckMediaType(const CMediaType *pMediaType) { VIDEOINFOHEADER *pvi = (VIDEOINFOHEADER *)(pMediaType->Format()); if(*pMediaType != m_mt) @@ -776,7 +774,7 @@ HRESULT CBallStream::CheckMediaType(const CMediaType *pMediaType) } // CheckMediaType // This method is called after the pins are connected to allocate buffers to stream data -HRESULT CBallStream::DecideBufferSize(IMemAllocator *pAlloc, ALLOCATOR_PROPERTIES *pProperties) +HRESULT CCameraStream::DecideBufferSize(IMemAllocator *pAlloc, ALLOCATOR_PROPERTIES *pProperties) { CAutoLock cAutoLock(m_pFilter->pStateLock()); HRESULT hr = NOERROR; @@ -795,7 +793,7 @@ HRESULT CBallStream::DecideBufferSize(IMemAllocator *pAlloc, ALLOCATOR_PROPERTIE } // DecideBufferSize // Called when graph is run -HRESULT CBallStream::OnThreadCreate() +HRESULT CCameraStream::OnThreadCreate() { //m_iRepeatTime = m_iDefaultRepeatTime; m_rtLastTime = 0; @@ -807,7 +805,7 @@ HRESULT CBallStream::OnThreadCreate() // IAMStreamConfig ////////////////////////////////////////////////////////////////////////// -HRESULT STDMETHODCALLTYPE CBallStream::SetFormat(AM_MEDIA_TYPE *pmt) +HRESULT STDMETHODCALLTYPE CCameraStream::SetFormat(AM_MEDIA_TYPE *pmt) { DECLARE_PTR(VIDEOINFOHEADER, pvi, m_mt.pbFormat); m_mt = *pmt; @@ -821,20 +819,20 @@ HRESULT STDMETHODCALLTYPE CBallStream::SetFormat(AM_MEDIA_TYPE *pmt) return S_OK; } -HRESULT STDMETHODCALLTYPE CBallStream::GetFormat(AM_MEDIA_TYPE **ppmt) +HRESULT STDMETHODCALLTYPE CCameraStream::GetFormat(AM_MEDIA_TYPE **ppmt) { *ppmt = CreateMediaType(&m_mt); return S_OK; } -HRESULT STDMETHODCALLTYPE CBallStream::GetNumberOfCapabilities(int *piCount, int *piSize) +HRESULT STDMETHODCALLTYPE CCameraStream::GetNumberOfCapabilities(int *piCount, int *piSize) { *piCount = 8; *piSize = sizeof(VIDEO_STREAM_CONFIG_CAPS); return S_OK; } -HRESULT STDMETHODCALLTYPE CBallStream::GetStreamCaps(int iIndex, AM_MEDIA_TYPE **pmt, BYTE *pSCC) +HRESULT STDMETHODCALLTYPE CCameraStream::GetStreamCaps(int iIndex, AM_MEDIA_TYPE **pmt, BYTE *pSCC) { *pmt = CreateMediaType(&m_mt); DECLARE_PTR(VIDEOINFOHEADER, pvi, (*pmt)->pbFormat); @@ -899,14 +897,14 @@ HRESULT STDMETHODCALLTYPE 
CBallStream::GetStreamCaps(int iIndex, AM_MEDIA_TYPE * ////////////////////////////////////////////////////////////////////////// -HRESULT CBallStream::Set(REFGUID guidPropSet, DWORD dwID, void *pInstanceData, +HRESULT CCameraStream::Set(REFGUID guidPropSet, DWORD dwID, void *pInstanceData, DWORD cbInstanceData, void *pPropData, DWORD cbPropData) {// Set: Cannot set any properties. return E_NOTIMPL; } // Get: Return the pin category (our only property). -HRESULT CBallStream::Get( +HRESULT CCameraStream::Get( REFGUID guidPropSet, // Which property set. DWORD dwPropID, // Which property in that set. void *pInstanceData, // Instance data (ignore). @@ -935,7 +933,7 @@ HRESULT CBallStream::Get( } // QuerySupported: Query whether the pin supports the specified property. -HRESULT CBallStream::QuerySupported(REFGUID guidPropSet, DWORD dwPropID, DWORD *pTypeSupport) +HRESULT CCameraStream::QuerySupported(REFGUID guidPropSet, DWORD dwPropID, DWORD *pTypeSupport) { if (guidPropSet != AMPROPSETID_Pin) return E_PROP_SET_UNSUPPORTED; if (dwPropID != AMPROPERTY_PIN_CATEGORY) return E_PROP_ID_UNSUPPORTED; @@ -944,7 +942,7 @@ HRESULT CBallStream::QuerySupported(REFGUID guidPropSet, DWORD dwPropID, DWORD * return S_OK; } -DWORD CBallStream::ThreadProc() +DWORD CCameraStream::ThreadProc() { return CSourceStream::ThreadProc(); } \ No newline at end of file diff --git a/winsource/winsource.h b/winsource/winsource.h index 6dc5d4c..832146b 100644 --- a/winsource/winsource.h +++ b/winsource/winsource.h @@ -1,5 +1,5 @@ // {3A24BD2F-B9B1-4B32-9A1E-17791624B6AB} -DEFINE_GUID(CLSID_BouncingBall, +DEFINE_GUID(CLSID_Kinatomic_Camera, 0x3a24bd2f, 0xb9b1, 0x4b32, 0x9a, 0x1e, 0x17, 0x79, 0x16, 0x24, 0xb6, 0xab); #define DECLARE_PTR(type, ptr, expr) type* ptr = (type*)(expr); @@ -8,16 +8,16 @@ DEFINE_GUID(CLSID_BouncingBall, // Forward Declarations //------------------------------------------------------------------------------ // The class managing the output pin -class CBallStream; +class CCameraStream; //------------------------------------------------------------------------------ -// Class CBouncingBall +// Class CCameraOutput // -// This is the main class for the bouncing ball filter. It inherits from +// This is the main class for the camera output. It inherits from // CSource, the DirectShow base class for source filters. //------------------------------------------------------------------------------ -class CBouncingBall : public CSource +class CCameraOutput : public CSource { public: @@ -29,38 +29,36 @@ class CBouncingBall : public CSource private: // It is only allowed to to create these objects with CreateInstance - CBouncingBall(LPUNKNOWN lpunk, HRESULT *phr); + CCameraOutput(LPUNKNOWN lpunk, HRESULT *phr); }; // CBouncingBall //------------------------------------------------------------------------------ -// Class CBallStream +// Class CCameraStream // // This class implements the stream which is used to output the bouncing ball // data from the source filter. It inherits from DirectShows's base // CSourceStream class. 
//------------------------------------------------------------------------------ -class CBallStream : public CSourceStream, public IAMStreamConfig, public IKsPropertySet +class CCameraStream : public CSourceStream, public IAMStreamConfig, public IKsPropertySet { public: - CBallStream(HRESULT *phr, CBouncingBall *pParent, LPCWSTR pPinName); - ~CBallStream(); + CCameraStream(HRESULT *phr, CCameraOutput *pParent, LPCWSTR pPinName); + ~CCameraStream(); - // plots a ball into the supplied video frame + // Update image buffer HRESULT FillBuffer(IMediaSample *pms); // Ask for buffers of the size appropriate to the agreed media type HRESULT DecideBufferSize(IMemAllocator *pIMemAlloc, ALLOCATOR_PROPERTIES *pProperties); - // Set the agreed media type, and set up the necessary ball parameters + // Set the agreed media type HRESULT SetMediaType(const CMediaType *pMediaType); - // Because we calculate the ball there is no reason why we - // can't calculate it in any one of a set of formats... HRESULT CheckMediaType(const CMediaType *pMediaType); HRESULT GetMediaType(int iPosition, CMediaType *pmt); @@ -103,7 +101,7 @@ class CBallStream : public CSourceStream, public IAMStreamConfig, public IKsProp CCritSec m_cSharedState; // Lock on m_rtSampleTime and m_Ball CRefTime m_rtSampleTime; // The time stamp for each sample //CBall *m_Ball; // The current ball object - CBouncingBall *m_pParent; + CCameraOutput *m_pParent; char *rxBuff; int rxBuffLen; @@ -128,6 +126,6 @@ class CBallStream : public CSourceStream, public IAMStreamConfig, public IKsProp //enum Colour {Red, Blue, Green, Yellow}; //HRESULT SetPaletteEntries(Colour colour); -}; // CBallStream +}; // CCameraStream From b5977162ad9dbea068f9f4f218bab288ff298147 Mon Sep 17 00:00:00 2001 From: TimSC Date: Mon, 25 Nov 2013 15:32:41 +0000 Subject: [PATCH 158/256] Set frame rate to 25 fps --- winsource/winsource.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/winsource/winsource.cpp b/winsource/winsource.cpp index 5052d6b..30fb3e6 100644 --- a/winsource/winsource.cpp +++ b/winsource/winsource.cpp @@ -676,7 +676,7 @@ HRESULT CCameraStream::FillBuffer(IMediaSample *pms) memcpy(pData, this->currentFrame, lDataLen); } - if(elapseTxMs > 10.) + if(elapseTxMs > 40.) 
{ this->SendStatusViaNamedPipe(width, height, lDataLen); this->lastTxUpdateTime=fiTime; From 656c8016edb588ec2e4b60d942875359394ddec0 Mon Sep 17 00:00:00 2001 From: TimSC Date: Mon, 25 Nov 2013 15:34:30 +0000 Subject: [PATCH 159/256] Remove test code --- winsource/winsource.cpp | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/winsource/winsource.cpp b/winsource/winsource.cpp index 30fb3e6..c8a9786 100644 --- a/winsource/winsource.cpp +++ b/winsource/winsource.cpp @@ -297,7 +297,7 @@ int CCameraStream::ReceiveDataViaNamedPipe() int frameChanged = 0; - if(this->pipeHandle == INVALID_HANDLE_VALUE) + /*if(this->pipeHandle == INVALID_HANDLE_VALUE) { for(DWORD i=0; icurrentFrameLen; i++) { @@ -306,7 +306,7 @@ int CCameraStream::ReceiveDataViaNamedPipe() else this->currentFrame[i] = 0x0; } - } + }*/ if(this->pipeHandle != INVALID_HANDLE_VALUE) { From 5387ce0629764220f69aceab97e451efaa5c58c0 Mon Sep 17 00:00:00 2001 From: TimSC Date: Mon, 25 Nov 2013 15:39:45 +0000 Subject: [PATCH 160/256] Remove test code --- winsource/winsource.cpp | 82 ----------------------------------------- 1 file changed, 82 deletions(-) diff --git a/winsource/winsource.cpp b/winsource/winsource.cpp index c8a9786..203cb8c 100644 --- a/winsource/winsource.cpp +++ b/winsource/winsource.cpp @@ -283,49 +283,12 @@ int CCameraStream::EstablishPipeConnection() int CCameraStream::ReceiveDataViaNamedPipe() { - this->EstablishPipeConnection(); - /*if(this->currentFrame!=NULL) - { - delete [] this->currentFrame; - this->currentFrame = NULL; - this->currentFrameLen = 0; - } - - this->currentFrame = NULL;*/ - int frameChanged = 0; - /*if(this->pipeHandle == INVALID_HANDLE_VALUE) - { - for(DWORD i=0; icurrentFrameLen; i++) - { - if(i%3==1) - this->currentFrame[i] = 0x255; - else - this->currentFrame[i] = 0x0; - } - }*/ - if(this->pipeHandle != INVALID_HANDLE_VALUE) { - /*for(DWORD i=0; icurrentFrameLen; i++) - { - this->currentFrame[i] = 0x255; - }*/ - - - //Transmit test message using named pipe - /*DWORD bytesWritten = 0; - char test[] = "Test Message"; - - if(HasOverlappedIoCompleted(&this->txo)) - { - BOOL res = WriteFileEx(this->pipeHandle, test, strlen(test), &this->txo, NULL); - } - - BOOL res = GetOverlappedResult(this->pipeHandle, &txo, &bytesWritten, TRUE);*/ //Receive messages from named pipe const int tmpBuffLen = 1024*1024; @@ -359,15 +322,6 @@ int CCameraStream::ReceiveDataViaNamedPipe() return 0; } - /*if(this->currentFrame!=NULL) - for(DWORD i=0; icurrentFrameLen; i++) - { - if(i%3==0) - this->currentFrame[i] = 0xff; - else - this->currentFrame[i] = 0x0; - }*/ - if(res && bytesRead > 0) { //Merge receive string with buffer @@ -412,15 +366,6 @@ int CCameraStream::ReceiveDataViaNamedPipe() } } - /*if(this->currentFrame!=NULL) - for(DWORD i=0; icurrentFrameLen; i++) - { - if(i%3==2) - this->currentFrame[i] = 0xff; - else - this->currentFrame[i] = 0x00; - }*/ - //Split receive buffer into separate messages UINT32 cursor = 0; int processing = 1; @@ -437,15 +382,6 @@ int CCameraStream::ReceiveDataViaNamedPipe() if(msgType == 2) { - //if(this->currentFrame!=NULL) - //for(DWORD i=0; icurrentFrameLen; i++) - //{ - // if(i%3==1) - // this->currentFrame[i] = 0xff; - // else - // this->currentFrame[i] = 0x00; - //} - //Message is new frame if(this->currentFrame!=NULL) for(unsigned i=0; icurrentFrameLen; i++) @@ -476,13 +412,6 @@ int CCameraStream::ReceiveDataViaNamedPipe() } } - /*if(this->currentFrame != NULL) - { - for(DWORD i=0; icurrentFrame[i] = rand(); - } - }*/ return frameChanged; } @@ -543,10 +472,6 @@ 
void CCameraStream::SendErrorViaNamedPipe(UINT32 errCode) if(this->pipeHandle != INVALID_HANDLE_VALUE) { - /*for(DWORD i=0; icurrentFrameLen; i++) - { - this->currentFrame[i] = 0x255; - }*/ //Transmit test message using named pipe DWORD bytesWritten = 0; @@ -600,13 +525,6 @@ HRESULT CCameraStream::FillBuffer(IMediaSample *pms) pms->GetPointer(&pData); lDataLen = pms->GetSize(); - /*if(this->currentFrame != NULL) - { - delete [] this->currentFrame; - this->currentFrame = NULL; - this->currentFrameLen = 0; - }*/ - //Calculate time since last frame update SYSTEMTIME systime; GetSystemTime(&systime); From 18ef4b316e933b334925d365e71ebd959322f37f Mon Sep 17 00:00:00 2001 From: TimSC Date: Sun, 1 Dec 2013 14:50:29 +0000 Subject: [PATCH 161/256] Add files for named pipe method --- namedpipeout.cpp | 75 ++++++++++++++++++++++++++++++++++++++++++++++++ namedpipeout.h | 31 ++++++++++++++++++++ setup.py | 2 +- videoout.cpp | 6 ++-- 4 files changed, 110 insertions(+), 4 deletions(-) create mode 100644 namedpipeout.cpp create mode 100644 namedpipeout.h diff --git a/namedpipeout.cpp b/namedpipeout.cpp new file mode 100644 index 0000000..71d5a97 --- /dev/null +++ b/namedpipeout.cpp @@ -0,0 +1,75 @@ + +#include "mfvideoout.h" +#include +#include + +//http://msdn.microsoft.com/en-us/library/windows/desktop/ms700134%28v=vs.85%29.aspx + +NamedPipeOut::NamedPipeOut(const char *devName) : Base_Video_Out() +{ + HRESULT hr = MFStartup(MF_VERSION); + if(!SUCCEEDED(hr)) + throw std::runtime_error("Media foundation startup failed"); + + hr = CoInitializeEx(NULL, COINIT_APARTMENTTHREADED); + if(hr == RPC_E_CHANGED_MODE) + throw std::runtime_error("CoInitializeEx failed"); + + + + + +} + +NamedPipeOut::~NamedPipeOut() +{ + MFShutdown(); + + CoUninitialize(); +} + +void NamedPipeOut::SendFrame(const char *imgIn, unsigned imgLen, const char *pxFmt, int width, int height) +{ + +} + +void NamedPipeOut::Stop() +{ + +} + +int NamedPipeOut::WaitForStop() +{ + return 1; +} + +void NamedPipeOut::SetOutputSize(int width, int height) +{ + +} + +void NamedPipeOut::SetOutputPxFmt(const char *fmt) +{ + +} + +void NamedPipeOut::Run() +{ + +} + +//******************************************************************************* + +void *NamedPipeOut_Worker_thread(void *arg) +{ + class MfVideoOut *argobj = (class MfVideoOut*) arg; + argobj->Run(); + + return NULL; +} + +std::vector List_out_devices() +{ + std::vector out; + return out; +} diff --git a/namedpipeout.h b/namedpipeout.h new file mode 100644 index 0000000..f94e4a5 --- /dev/null +++ b/namedpipeout.h @@ -0,0 +1,31 @@ + +#ifndef MFVIDEOOUT_H +#define MFVIDEOOUT_H + +#include +#include +#include "base.h" + +class NamedPipeOut : public Base_Video_Out +{ +public: + NamedPipeOut(const char *devName); + virtual ~NamedPipeOut(); + + void SendFrame(const char *imgIn, unsigned imgLen, const char *pxFmt, int width, int height); + void Stop(); + int WaitForStop(); + + virtual void SetOutputSize(int width, int height); + virtual void SetOutputPxFmt(const char *fmt); + + void Run(); + +}; + +void *NamedPipeOut_Worker_thread(void *arg); + +std::vector List_out_devices(); + +#endif //MFVIDEOOUT_H + diff --git a/setup.py b/setup.py index 116269a..6b54ece 100755 --- a/setup.py +++ b/setup.py @@ -23,7 +23,7 @@ c_args=[] l_args=["/MANIFEST"] - videolive = Extension("videolive", ["pixfmt.cpp", "libvideolive.cpp", "videoout.cpp", "videoin.cpp", "mfvideoin.cpp", "mfvideoout.cpp"], + videolive = Extension("videolive", ["pixfmt.cpp", "libvideolive.cpp", "videoout.cpp", "videoin.cpp", 
"mfvideoin.cpp", "namedpipeout.cpp"], define_macros=[('_'+os.name.upper(), None)], library_dirs=['C:\Dev\Lib\libjpeg-turbo-win\lib', "C:\Dev\Lib\pthreads\pthreads.2"], include_dirs=['C:\Dev\Lib\libjpeg-turbo-win\include', "C:\Dev\Lib\pthreads\pthreads.2"], diff --git a/videoout.cpp b/videoout.cpp index 8202972..5f0901c 100644 --- a/videoout.cpp +++ b/videoout.cpp @@ -3,7 +3,7 @@ #include #include "videoout.h" #ifdef _NT -#include "mfvideoout.h" +#include "namedpipeout.h" #endif #if _POSIX #include "v4l2out.h" @@ -51,7 +51,7 @@ PyObject *Video_out_manager_open(Video_out_manager *self, PyObject *args) Video_out *threadArgs = new Video_out(devarg); #endif #ifdef _NT - MfVideoOut *threadArgs = new MfVideoOut(devarg); + NamedPipeOut *threadArgs = new NamedPipeOut(devarg); #endif (*self->threads)[devarg] = threadArgs; @@ -62,7 +62,7 @@ PyObject *Video_out_manager_open(Video_out_manager *self, PyObject *args) pthread_create(&thread, NULL, Video_out_manager_Worker_thread, threadArgs); #endif #ifdef _NT - pthread_create(&thread, NULL, MfVideoOut_Worker_thread, threadArgs); + pthread_create(&thread, NULL, NamedPipeOut_Worker_thread, threadArgs); #endif Py_RETURN_NONE; From f00c6fb691f77bdc7595c24b11a0599d29d91b1a Mon Sep 17 00:00:00 2001 From: TimSC Date: Sun, 1 Dec 2013 16:44:01 +0000 Subject: [PATCH 162/256] First attempt at named pipe server --- namedpipeout.cpp | 348 +++++++++++++++++++++++++++++++++++++++++++++-- 1 file changed, 334 insertions(+), 14 deletions(-) diff --git a/namedpipeout.cpp b/namedpipeout.cpp index 71d5a97..4620712 100644 --- a/namedpipeout.cpp +++ b/namedpipeout.cpp @@ -1,30 +1,288 @@ -#include "mfvideoout.h" -#include -#include +#include "namedpipeout.h" -//http://msdn.microsoft.com/en-us/library/windows/desktop/ms700134%28v=vs.85%29.aspx +#include +#include +#include +using namespace std; -NamedPipeOut::NamedPipeOut(const char *devName) : Base_Video_Out() +#include +#include +#include +#define BUFSIZE 1024*1024*10 + +int ProcessClientMessage(class InstanceConfig &instanceConfig); +VOID GetAnswerToRequest(char *pReply, LPDWORD pchBytes, class InstanceConfig &instanceConfig, int frameCount); + +class InstanceConfig { - HRESULT hr = MFStartup(MF_VERSION); - if(!SUCCEEDED(hr)) - throw std::runtime_error("Media foundation startup failed"); +public: + std::string rxBuff; + UINT32 width, height, frameLen; - hr = CoInitializeEx(NULL, COINIT_APARTMENTTHREADED); - if(hr == RPC_E_CHANGED_MODE) - throw std::runtime_error("CoInitializeEx failed"); + InstanceConfig() + { + width = 0; + height = 0; + frameLen = 0; + } +}; + + +DWORD WINAPI InstanceThread(LPVOID lpvParam) +// This routine is a thread processing function to read from and reply to a client +// via the open pipe connection passed from the main loop. Note this allows +// the main loop to continue executing, potentially creating more threads of +// of this procedure to run concurrently, depending on the number of incoming +// client connections. +{ + HANDLE hHeap = GetProcessHeap(); + char* pRequest = (char*)HeapAlloc(hHeap, 0, BUFSIZE); + char* pReply = (char*)HeapAlloc(hHeap, 0, BUFSIZE); + + DWORD cbBytesRead = 0, cbReplyBytes = 0, cbWritten = 0; + BOOL fSuccess = FALSE; + HANDLE hPipe = NULL; + class InstanceConfig instanceConfig; + + // Do some extra error checking since the app will keep running even if this + // thread fails. 
+ + if (lpvParam == NULL) + { + printf( "\nERROR - Pipe Server Failure:\n"); + printf( " InstanceThread got an unexpected NULL value in lpvParam.\n"); + printf( " InstanceThread exitting.\n"); + if (pReply != NULL) HeapFree(hHeap, 0, pReply); + if (pRequest != NULL) HeapFree(hHeap, 0, pRequest); + return (DWORD)-1; + } + + if (pRequest == NULL) + { + printf( "\nERROR - Pipe Server Failure:\n"); + printf( " InstanceThread got an unexpected NULL heap allocation.\n"); + printf( " InstanceThread exitting.\n"); + if (pReply != NULL) HeapFree(hHeap, 0, pReply); + return (DWORD)-1; + } + + if (pReply == NULL) + { + printf( "\nERROR - Pipe Server Failure:\n"); + printf( " InstanceThread got an unexpected NULL heap allocation.\n"); + printf( " InstanceThread exitting.\n"); + if (pRequest != NULL) HeapFree(hHeap, 0, pRequest); + return (DWORD)-1; + } + // Print verbose messages. In production code, this should be for debugging only. + printf("InstanceThread created, receiving and processing messages.\n"); +// The thread's parameter is a handle to a pipe object instance. + + hPipe = (HANDLE) lpvParam; + //Initialise timer + SYSTEMTIME systime; + GetSystemTime(&systime); + FILETIME lastUpdateTime; + SystemTimeToFileTime(&systime, &lastUpdateTime); + int frameCount = 0; +// Loop until done reading + while (1) + { + SYSTEMTIME systime; + GetSystemTime(&systime); + FILETIME fiTime; + SystemTimeToFileTime(&systime, &fiTime); + LARGE_INTEGER fiTimeNum; + fiTimeNum.HighPart = fiTime.dwHighDateTime; + fiTimeNum.LowPart = fiTime.dwLowDateTime; + LARGE_INTEGER lastUpdate; + lastUpdate.HighPart = lastUpdateTime.dwHighDateTime; + lastUpdate.LowPart = lastUpdateTime.dwLowDateTime; + LARGE_INTEGER elapse; + elapse.QuadPart = fiTimeNum.QuadPart - lastUpdate.QuadPart; + float elapseMs = elapse.LowPart / 10000.f; + + // Read client requests from the pipe. This simplistic code only allows messages + // up to BUFSIZE characters in length. + fSuccess = ReadFile( + hPipe, // handle to pipe + pRequest, // buffer to receive data + BUFSIZE, // size of buffer + &cbBytesRead, // number of bytes read + NULL); // not overlapped I/O + + if (!fSuccess || cbBytesRead == 0) + { + if (GetLastError() == ERROR_BROKEN_PIPE) + { + _tprintf(TEXT("InstanceThread: client disconnected.\n"), GetLastError()); + } + else + { + _tprintf(TEXT("InstanceThread ReadFile failed, GLE=%d.\n"), GetLastError()); + } + break; + } + + //Process received message + instanceConfig.rxBuff.append(pRequest, cbBytesRead); + + if(elapseMs >= 10.f) + { + ProcessClientMessage(instanceConfig); + + printf("elapse %f\n", elapseMs); + // Get response string + GetAnswerToRequest(pReply, &cbReplyBytes, instanceConfig, frameCount); + frameCount++; + + // Write the reply to the pipe. + fSuccess = WriteFile( + hPipe, // handle to pipe + pReply, // buffer to write from + cbReplyBytes, // number of bytes to write + &cbWritten, // number of bytes written + NULL); // not overlapped I/O + + if (!fSuccess || cbReplyBytes != cbWritten) + { + _tprintf(TEXT("InstanceThread WriteFile failed, GLE=%d.\n"), GetLastError()); + break; + } + lastUpdateTime=fiTime; + } + else + { + Sleep(1); + } + } + +// Flush the pipe to allow the client to read the pipe's contents +// before disconnecting. Then disconnect the pipe, and close the +// handle to this pipe instance. 
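// [Editor's note] On a pipe handle FlushFileBuffers() blocks until the client has
// read the data already written, so calling it before DisconnectNamedPipe() keeps
// the last reply from being cut off when the handle is torn down.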
+ + FlushFileBuffers(hPipe); + DisconnectNamedPipe(hPipe); + CloseHandle(hPipe); + + HeapFree(hHeap, 0, pRequest); + HeapFree(hHeap, 0, pReply); + + printf("InstanceThread exitting.\n"); + return 1; } -NamedPipeOut::~NamedPipeOut() +VOID GetAnswerToRequest(char *pReply, LPDWORD pchBytes, class InstanceConfig &instanceConfig, int frameCount) +// This routine is a simple function to print the client request to the console +// and populate the reply buffer with a default data string. This is where you +// would put the actual client request processing code that runs in the context +// of an instance thread. Keep in mind the main thread will continue to wait for +// and receive other client connections while the instance thread is working. +{ + //char str[] = "default answer from server"; + //strncpy_s(pReply, strlen(str)+1, str, BUFSIZE); + //*pchBytes = (strlen(str)+1); + + if(instanceConfig.frameLen + 8 < BUFSIZE) + { + //Return frame + UINT32 *numArr = (UINT32 *)pReply; + numArr[0] = 2; + numArr[1] = instanceConfig.frameLen; + memset(&pReply[8], 0x00, instanceConfig.frameLen); + for(unsigned i=0;i 8) + { + UINT32 *wordArray = (UINT32 *)instanceConfig.rxBuff.c_str(); + UINT32 msgType = wordArray[0]; + UINT32 msgLen = wordArray[1]; + if(instanceConfig.rxBuff.size() >= 8+msgLen) + { + std::string msg(instanceConfig.rxBuff, 8, msgLen); + UINT32 *msgArray = (UINT32 *)msg.c_str(); + //printf("%d %d %d\n", rxBuff.size(), msgType, msg.size()); + + instanceConfig.rxBuff.assign(instanceConfig.rxBuff, 8+msgLen, instanceConfig.rxBuff.size() - 8 - msgLen); + + if(msgType == 1) + { + instanceConfig.width = msgArray[0]; + instanceConfig.height = msgArray[1]; + instanceConfig.frameLen = msgArray[2]; + count ++; + } + + if(msgType != 1) + { + printf("Buffer corruption detected\n"); + return 0; + } + } + else + { + processing = 0; + } + } + + printf("rx msg count %d\n", count); + printf("w%d h%d buff%d\n",instanceConfig.width, instanceConfig.height, instanceConfig.frameLen); + //printf("Remain %d\n", rxBuff.size()); + + /*int numUints32s = len / 4; + UINT32 *uint32Arr = (UINT32 *)pchRequest; + for(int i=0;iRun(); return NULL; From d96b1ceecaecdc7297cb4d578f47a43b258a15aa Mon Sep 17 00:00:00 2001 From: TimSC Date: Sun, 1 Dec 2013 17:17:14 +0000 Subject: [PATCH 163/256] Send frames using test script --- namedpipeout.cpp | 3 ++- videoout.cpp | 4 ++++ videoout.py | 24 ++++++++++++++++++++++++ 3 files changed, 30 insertions(+), 1 deletion(-) create mode 100644 videoout.py diff --git a/namedpipeout.cpp b/namedpipeout.cpp index 4620712..f745851 100644 --- a/namedpipeout.cpp +++ b/namedpipeout.cpp @@ -288,7 +288,7 @@ NamedPipeOut::~NamedPipeOut() void NamedPipeOut::SendFrame(const char *imgIn, unsigned imgLen, const char *pxFmt, int width, int height) { - + cout << "NamedPipeOut::SendFrame" << endl; } void NamedPipeOut::Stop() @@ -391,5 +391,6 @@ void *NamedPipeOut_Worker_thread(void *arg) std::vector List_out_devices() { std::vector out; + out.push_back("VirtualCamera"); return out; } diff --git a/videoout.cpp b/videoout.cpp index 5f0901c..edda79b 100644 --- a/videoout.cpp +++ b/videoout.cpp @@ -1,6 +1,7 @@ #include #include +#include #include "videoout.h" #ifdef _NT #include "namedpipeout.h" @@ -33,6 +34,8 @@ void Video_out_manager_dealloc(Video_out_manager *self) PyObject *Video_out_manager_open(Video_out_manager *self, PyObject *args) { + std::cout << "Video_out_manager_open" << std::endl; + //Process arguments const char *devarg = NULL; const char *pxFmtIn = NULL; @@ -41,6 +44,7 @@ PyObject 
*Video_out_manager_open(Video_out_manager *self, PyObject *args) if(!PyArg_ParseTuple(args, "ssii", &devarg, &pxFmtIn, &widthIn, &heightIn)) { + std::cout << "err" << std::endl; PyErr_Format(PyExc_RuntimeError, "Incorrect arguments to function."); Py_RETURN_NONE; } diff --git a/videoout.py b/videoout.py new file mode 100644 index 0000000..4678bde --- /dev/null +++ b/videoout.py @@ -0,0 +1,24 @@ + +import videolive, time +import numpy as np + +if __name__=="__main__": + outManager = videolive.Video_out_manager() + print outManager + + devs = outManager.list_devices() + print devs + + if len(devs) == 0: + print "No source devices detected" + exit(0) + + outManager.open(devs[0], "RGB24", 640, 480) + + img = np.zeros(shape=(640 * 480 * 3,), dtype=np.uint8) + + for i in range(100): + outManager.send_frame(devs[0], str(img.tostring()), "RGB24", 640, 480) + + time.sleep(0.1) + From 1cc43932c09cb63f26ef80470aac7d07c5e2f4ae Mon Sep 17 00:00:00 2001 From: TimSC Date: Sun, 1 Dec 2013 17:25:39 +0000 Subject: [PATCH 164/256] Adding lock to protect memory --- namedpipeout.cpp | 26 +++++++++++++++++++------- namedpipeout.h | 4 ++++ 2 files changed, 23 insertions(+), 7 deletions(-) diff --git a/namedpipeout.cpp b/namedpipeout.cpp index f745851..0d2fd86 100644 --- a/namedpipeout.cpp +++ b/namedpipeout.cpp @@ -1,7 +1,6 @@ #include "namedpipeout.h" -#include #include #include using namespace std; @@ -279,6 +278,8 @@ NamedPipeOut::NamedPipeOut(const char *devName) : Base_Video_Out() if(hr == RPC_E_CHANGED_MODE) throw std::runtime_error("CoInitializeEx failed"); + running = 0; + InitializeCriticalSection(&lock); } NamedPipeOut::~NamedPipeOut() @@ -293,7 +294,9 @@ void NamedPipeOut::SendFrame(const char *imgIn, unsigned imgLen, const char *pxF void NamedPipeOut::Stop() { - + EnterCriticalSection(&lock); + this->running = 0; + LeaveCriticalSection(&lock); } int NamedPipeOut::WaitForStop() @@ -313,10 +316,15 @@ void NamedPipeOut::SetOutputPxFmt(const char *fmt) void NamedPipeOut::Run() { - BOOL fConnected = FALSE; - DWORD dwThreadId = 0; - HANDLE hPipe = INVALID_HANDLE_VALUE, hThread = NULL; - LPTSTR lpszPipename = TEXT("\\\\.\\pipe\\testpipe"); + EnterCriticalSection(&lock); + this->running = 1; + int tmpRunning = this->running; + LeaveCriticalSection(&lock); + + BOOL fConnected = FALSE; + DWORD dwThreadId = 0; + HANDLE hPipe = INVALID_HANDLE_VALUE, hThread = NULL; + LPTSTR lpszPipename = TEXT("\\\\.\\pipe\\testpipe"); // Creates an instance of the named pipe and // then waits for a client to connect to it. When the client @@ -324,7 +332,7 @@ void NamedPipeOut::Run() // with that client, and this loop is free to wait for the // next client connect request. It is an infinite loop. - for (;;) + while (tmpRunning) { _tprintf( TEXT("\nPipe Server: Main thread awaiting client connection on %s\n"), lpszPipename); hPipe = CreateNamedPipe( @@ -375,6 +383,10 @@ void NamedPipeOut::Run() else // The client could not connect, so close the pipe. 
CloseHandle(hPipe); + + EnterCriticalSection(&lock); + tmpRunning = this->running; + LeaveCriticalSection(&lock); } } diff --git a/namedpipeout.h b/namedpipeout.h index f94e4a5..4ffe62b 100644 --- a/namedpipeout.h +++ b/namedpipeout.h @@ -2,6 +2,7 @@ #ifndef MFVIDEOOUT_H #define MFVIDEOOUT_H +#include #include #include #include "base.h" @@ -21,6 +22,9 @@ class NamedPipeOut : public Base_Video_Out void Run(); +protected: + int running; + CRITICAL_SECTION lock; }; void *NamedPipeOut_Worker_thread(void *arg); From 51dda5edcb505d4b3a2b59bf17d2cde718ca076d Mon Sep 17 00:00:00 2001 From: TimSC Date: Sun, 1 Dec 2013 17:45:13 +0000 Subject: [PATCH 165/256] Pass objection pointer to connection thread --- namedpipeout.cpp | 50 +++++++++++++++++++++++------------------------- 1 file changed, 24 insertions(+), 26 deletions(-) diff --git a/namedpipeout.cpp b/namedpipeout.cpp index 0d2fd86..20c9e68 100644 --- a/namedpipeout.cpp +++ b/namedpipeout.cpp @@ -27,6 +27,18 @@ class InstanceConfig } }; +class ConnectionThreadInfo +{ +public: + HANDLE hPipe; + class NamedPipeOut *parent; + + ConnectionThreadInfo() + { + hPipe = INVALID_HANDLE_VALUE; + parent = NULL; + } +}; DWORD WINAPI InstanceThread(LPVOID lpvParam) // This routine is a thread processing function to read from and reply to a client @@ -75,12 +87,14 @@ DWORD WINAPI InstanceThread(LPVOID lpvParam) return (DWORD)-1; } - // Print verbose messages. In production code, this should be for debugging only. - printf("InstanceThread created, receiving and processing messages.\n"); + // Print verbose messages. In production code, this should be for debugging only. + printf("InstanceThread created, receiving and processing messages.\n"); -// The thread's parameter is a handle to a pipe object instance. + // The thread's parameter is a handle to a pipe object instance. - hPipe = (HANDLE) lpvParam; + class ConnectionThreadInfo *info = (class ConnectionThreadInfo *)lpvParam; + hPipe = info->hPipe; + delete info; //Initialise timer SYSTEMTIME systime; @@ -178,16 +192,7 @@ DWORD WINAPI InstanceThread(LPVOID lpvParam) } VOID GetAnswerToRequest(char *pReply, LPDWORD pchBytes, class InstanceConfig &instanceConfig, int frameCount) -// This routine is a simple function to print the client request to the console -// and populate the reply buffer with a default data string. This is where you -// would put the actual client request processing code that runs in the context -// of an instance thread. Keep in mind the main thread will continue to wait for -// and receive other client connections while the instance thread is working. { - //char str[] = "default answer from server"; - //strncpy_s(pReply, strlen(str)+1, str, BUFSIZE); - //*pchBytes = (strlen(str)+1); - if(instanceConfig.frameLen + 8 < BUFSIZE) { //Return frame @@ -254,15 +259,6 @@ int ProcessClientMessage(class InstanceConfig &instanceConfig) printf("rx msg count %d\n", count); printf("w%d h%d buff%d\n",instanceConfig.width, instanceConfig.height, instanceConfig.frameLen); - //printf("Remain %d\n", rxBuff.size()); - - /*int numUints32s = len / 4; - UINT32 *uint32Arr = (UINT32 *)pchRequest; - for(int i=0;iparent = this; + info->hPipe = hPipe; // Create a thread for this client. 
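	// [Editor's note] ConnectionThreadInfo is heap-allocated per connection so it
	// safely outlives this loop iteration: CreateThread() only receives the raw
	// pointer, and InstanceThread() copies the pipe handle out of it and deletes it.
	// Carrying the NamedPipeOut pointer alongside the handle is what lets the
	// per-connection thread reach the object that holds the most recent frame.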
hThread = CreateThread( NULL, // no security attribute 0, // default stack size InstanceThread, // thread proc - (LPVOID) hPipe, // thread parameter + (LPVOID) info, // thread parameter 0, // not suspended - &dwThreadId); // returns thread ID + &dwThreadId); // returns thread ID if (hThread == NULL) { From 2ffae28e8c09cbd15fe15ef577196bf57470a407 Mon Sep 17 00:00:00 2001 From: TimSC Date: Mon, 2 Dec 2013 17:24:05 +0000 Subject: [PATCH 166/256] Return actual frame or at least a part --- namedpipeout.cpp | 54 +++++++++++++++++++++++++++++++++++++----------- namedpipeout.h | 7 +++++++ videoout.py | 15 ++++++++++---- 3 files changed, 60 insertions(+), 16 deletions(-) diff --git a/namedpipeout.cpp b/namedpipeout.cpp index 20c9e68..963d806 100644 --- a/namedpipeout.cpp +++ b/namedpipeout.cpp @@ -11,7 +11,7 @@ using namespace std; #define BUFSIZE 1024*1024*10 int ProcessClientMessage(class InstanceConfig &instanceConfig); -VOID GetAnswerToRequest(char *pReply, LPDWORD pchBytes, class InstanceConfig &instanceConfig, int frameCount); +VOID GetAnswerToRequest(char *pReply, LPDWORD pchBytes, class InstanceConfig &instanceConfig, class NamedPipeOut *, int frameCount); class InstanceConfig { @@ -93,6 +93,7 @@ DWORD WINAPI InstanceThread(LPVOID lpvParam) // The thread's parameter is a handle to a pipe object instance. class ConnectionThreadInfo *info = (class ConnectionThreadInfo *)lpvParam; + class NamedPipeOut *parent = info->parent; hPipe = info->hPipe; delete info; @@ -152,7 +153,7 @@ DWORD WINAPI InstanceThread(LPVOID lpvParam) printf("elapse %f\n", elapseMs); // Get response string - GetAnswerToRequest(pReply, &cbReplyBytes, instanceConfig, frameCount); + GetAnswerToRequest(pReply, &cbReplyBytes, instanceConfig, parent, frameCount); frameCount++; // Write the reply to the pipe. 
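[Editor's sketch] The changes in this patch form a simple producer/consumer hand-off around NamedPipeOut's critical section: SendFrame() copies the newest frame in under Lock()/UnLock(), and GetAnswerToRequest() copies it back out under the same lock before the reply is written to the pipe (Lock()/UnLock() wrap EnterCriticalSection/LeaveCriticalSection). A minimal self-contained sketch of that pattern follows; SharedFrame and its members are illustrative names, not part of the module (needs <windows.h>, <vector> and <cstring>):

	// A shared frame slot guarded by a CRITICAL_SECTION, mirroring what
	// NamedPipeOut does with currentFrame/currentFrameLen and Lock()/UnLock().
	struct SharedFrame
	{
		CRITICAL_SECTION cs;
		std::vector<unsigned char> pixels;    // last frame published by the producer

		SharedFrame()  { InitializeCriticalSection(&cs); }
		~SharedFrame() { DeleteCriticalSection(&cs); }

		void Publish(const unsigned char *data, unsigned len)   // e.g. SendFrame()
		{
			EnterCriticalSection(&cs);
			pixels.assign(data, data + len);      // overwrite with the newest frame
			LeaveCriticalSection(&cs);
		}

		unsigned CopyOut(unsigned char *dst, unsigned dstLen)   // e.g. reply path
		{
			EnterCriticalSection(&cs);
			unsigned n = (unsigned)pixels.size();
			if(n > dstLen) n = dstLen;            // never overrun the reply buffer
			if(n > 0) memcpy(dst, &pixels[0], n);
			LeaveCriticalSection(&cs);
			return n;
		}
	};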
@@ -191,7 +192,8 @@ DWORD WINAPI InstanceThread(LPVOID lpvParam) return 1; } -VOID GetAnswerToRequest(char *pReply, LPDWORD pchBytes, class InstanceConfig &instanceConfig, int frameCount) +VOID GetAnswerToRequest(char *pReply, LPDWORD pchBytes, class InstanceConfig &instanceConfig, + class NamedPipeOut *parent, int frameCount) { if(instanceConfig.frameLen + 8 < BUFSIZE) { @@ -199,16 +201,18 @@ VOID GetAnswerToRequest(char *pReply, LPDWORD pchBytes, class InstanceConfig &in UINT32 *numArr = (UINT32 *)pReply; numArr[0] = 2; numArr[1] = instanceConfig.frameLen; - memset(&pReply[8], 0x00, instanceConfig.frameLen); - for(unsigned i=0;iLock(); + unsigned bytesToCopy = instanceConfig.frameLen; + cout << bytesToCopy << "\t" << parent->currentFrameLen << endl; + //if(bytesToCopy > parent->currentFrameLen) + // bytesToCopy = parent->currentFrameLen; - *pchBytes = 8 + instanceConfig.frameLen; + memcpy(imgPix, parent->currentFrame, bytesToCopy); + parent->UnLock(); + + *pchBytes = 8 + bytesToCopy; } else { @@ -273,6 +277,9 @@ NamedPipeOut::NamedPipeOut(const char *devName) : Base_Video_Out() throw std::runtime_error("CoInitializeEx failed"); running = 0; + currentFrameAlloc = 0; + currentFrameLen = 0; + currentFrame = NULL; InitializeCriticalSection(&lock); } @@ -284,6 +291,18 @@ NamedPipeOut::~NamedPipeOut() void NamedPipeOut::SendFrame(const char *imgIn, unsigned imgLen, const char *pxFmt, int width, int height) { cout << "NamedPipeOut::SendFrame" << endl; + + this->Lock(); + if(imgLen > this->currentFrameAlloc || this->currentFrame == NULL) + { + delete [] this->currentFrame; + this->currentFrame = new unsigned char [imgLen]; + this->currentFrameAlloc = imgLen; + } + + memcpy(this->currentFrame, imgIn, imgLen); + this->currentFrameLen = imgLen; + this->UnLock(); } void NamedPipeOut::Stop() @@ -388,6 +407,17 @@ void NamedPipeOut::Run() } } +void NamedPipeOut::Lock() +{ + EnterCriticalSection(&lock); +} + +void NamedPipeOut::UnLock() +{ + LeaveCriticalSection(&lock); +} + + //******************************************************************************* void *NamedPipeOut_Worker_thread(void *arg) diff --git a/namedpipeout.h b/namedpipeout.h index 4ffe62b..b7026f7 100644 --- a/namedpipeout.h +++ b/namedpipeout.h @@ -22,6 +22,13 @@ class NamedPipeOut : public Base_Video_Out void Run(); + unsigned char *currentFrame; + unsigned currentFrameAlloc; + unsigned currentFrameLen; + + void Lock(); + void UnLock(); + protected: int running; CRITICAL_SECTION lock; diff --git a/videoout.py b/videoout.py index 4678bde..9807f8d 100644 --- a/videoout.py +++ b/videoout.py @@ -15,10 +15,17 @@ outManager.open(devs[0], "RGB24", 640, 480) - img = np.zeros(shape=(640 * 480 * 3,), dtype=np.uint8) + imgLen = 640 * 480 * 3 + img = np.zeros(shape=(imgLen,), dtype=np.uint8) + + for i in range(imgLen): + if (i % 500) > 250: + img[i] = np.random.randint(0, 255) + else: + img[i] = 128 - for i in range(100): - outManager.send_frame(devs[0], str(img.tostring()), "RGB24", 640, 480) + while(1): + outManager.send_frame(devs[0], str(img.tostring()), "RGB24", 640, 480) - time.sleep(0.1) + time.sleep(0.1) From 3ca2cf5673bd07541e5e74590afa16a1c1df6855 Mon Sep 17 00:00:00 2001 From: TimSC Date: Mon, 2 Dec 2013 17:26:08 +0000 Subject: [PATCH 167/256] Prevent buffer overrun --- namedpipeout.cpp | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/namedpipeout.cpp b/namedpipeout.cpp index 963d806..236eed0 100644 --- a/namedpipeout.cpp +++ b/namedpipeout.cpp @@ -205,9 +205,9 @@ VOID GetAnswerToRequest(char *pReply, LPDWORD 
pchBytes, class InstanceConfig &in parent->Lock(); unsigned bytesToCopy = instanceConfig.frameLen; - cout << bytesToCopy << "\t" << parent->currentFrameLen << endl; - //if(bytesToCopy > parent->currentFrameLen) - // bytesToCopy = parent->currentFrameLen; + //cout << bytesToCopy << "\t" << parent->currentFrameLen << endl; + if(bytesToCopy > parent->currentFrameLen) + bytesToCopy = parent->currentFrameLen; memcpy(imgPix, parent->currentFrame, bytesToCopy); parent->UnLock(); From 0b02912c1215bf52d4439b923e58ddb6d0b96577 Mon Sep 17 00:00:00 2001 From: TimSC Date: Mon, 2 Dec 2013 17:32:50 +0000 Subject: [PATCH 168/256] Fill buffer every iteration --- winsource/winsource.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/winsource/winsource.cpp b/winsource/winsource.cpp index 203cb8c..92c14de 100644 --- a/winsource/winsource.cpp +++ b/winsource/winsource.cpp @@ -579,7 +579,7 @@ HRESULT CCameraStream::FillBuffer(IMediaSample *pms) this->lastRxUpdateTime=fiTime; } - if(this->currentFrame != NULL && frameChanged) + if(this->currentFrame != NULL) { /* REFERENCE_TIME rtNow; REFERENCE_TIME avgFrameTime = ((VIDEOINFOHEADER*)m_mt.pbFormat)->AvgTimePerFrame; From 9ef3bef2394476adc29c24bb2d2d65cfe7ea9263 Mon Sep 17 00:00:00 2001 From: TimSC Date: Mon, 2 Dec 2013 19:13:20 +0000 Subject: [PATCH 169/256] Prevent buffer overrun --- winsource/winsource.cpp | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/winsource/winsource.cpp b/winsource/winsource.cpp index 92c14de..788dacd 100644 --- a/winsource/winsource.cpp +++ b/winsource/winsource.cpp @@ -590,8 +590,11 @@ HRESULT CCameraStream::FillBuffer(IMediaSample *pms) pms->SetTime(&rtNow, &m_rtLastTime); pms->SetSyncPoint(TRUE); //End of slow?*/ + unsigned bytesToCopy = lDataLen; + if(this->currentFrameLen < bytesToCopy) + bytesToCopy = this->currentFrameLen; - memcpy(pData, this->currentFrame, lDataLen); + memcpy(pData, this->currentFrame, bytesToCopy); } if(elapseTxMs > 40.) From a0a0a05636d57b7c138df0da4670fa505de81a78 Mon Sep 17 00:00:00 2001 From: TimSC Date: Mon, 2 Dec 2013 20:05:10 +0000 Subject: [PATCH 170/256] Convert to BGR24 pix fmt --- namedpipeout.cpp | 22 +++++++++++++++++----- pixfmt.cpp | 40 ++++++++++++++++++++++++++++++++++++++++ winsource/winsource.cpp | 3 ++- 3 files changed, 59 insertions(+), 6 deletions(-) diff --git a/namedpipeout.cpp b/namedpipeout.cpp index 236eed0..70fbff6 100644 --- a/namedpipeout.cpp +++ b/namedpipeout.cpp @@ -1,5 +1,6 @@ #include "namedpipeout.h" +#include "pixfmt.h" #include #include @@ -292,16 +293,27 @@ void NamedPipeOut::SendFrame(const char *imgIn, unsigned imgLen, const char *pxF { cout << "NamedPipeOut::SendFrame" << endl; + //Convert from input pxFmt to BGR24. 
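	//Note: DecodeFrame() (pixfmt.cpp) allocates *buffOut with new[] when it is
	//passed in as NULL, so the bgrBuff filled in below is owned by this caller and
	//should eventually be released with delete [] once it has been copied into
	//currentFrame.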
+ + unsigned char *bgrBuff = NULL; + unsigned bgrBuffLen = 0; + int ret = DecodeFrame((unsigned char*)imgIn, imgLen, + pxFmt, + width, height, + "BGR24", + &bgrBuff, + &bgrBuffLen); + this->Lock(); - if(imgLen > this->currentFrameAlloc || this->currentFrame == NULL) + if(bgrBuffLen > this->currentFrameAlloc || this->currentFrame == NULL) { delete [] this->currentFrame; - this->currentFrame = new unsigned char [imgLen]; - this->currentFrameAlloc = imgLen; + this->currentFrame = new unsigned char [bgrBuffLen]; + this->currentFrameAlloc = bgrBuffLen; } - memcpy(this->currentFrame, imgIn, imgLen); - this->currentFrameLen = imgLen; + memcpy(this->currentFrame, bgrBuff, bgrBuffLen); + this->currentFrameLen = bgrBuffLen; this->UnLock(); } diff --git a/pixfmt.cpp b/pixfmt.cpp index 72b0a2d..45b2a10 100644 --- a/pixfmt.cpp +++ b/pixfmt.cpp @@ -495,6 +495,46 @@ int DecodeFrame(const unsigned char *data, unsigned dataLen, return 1; } + //RGB24 -> BGR24 + if(strcmp(inPxFmt,"RGB24")==0 && strcmp(targetPxFmt, "BGR24")==0) + { + *buffOut = new unsigned char[dataLen]; + *buffOutLen = dataLen; + for(unsigned i = 0; i+2 < dataLen; i+=3) + { + (*buffOut)[i+0] = data[i+2]; + (*buffOut)[i+1] = data[i+1]; + (*buffOut)[i+2] = data[i+0]; + } + return 1; + } + + //If no direct conversion to BGR24 is possible, convert to RGB24 + //as an intermediate step + if(strcmp(targetPxFmt, "BGR24")==0) + { + unsigned char *rgbBuff = NULL; + unsigned rgbBuffLen = 0; + int ret = DecodeFrame(data, dataLen, + inPxFmt, + width, height, + "RGB24", + &rgbBuff, + &rgbBuffLen); + + if(ret>0) + { + int ret2 = DecodeFrame(rgbBuff, rgbBuffLen, + "RBG24", + width, height, + targetPxFmt, + buffOut, + buffOutLen); + delete [] rgbBuff; + if(ret2>0) return ret2; + } + } + /* //Untested code if((strcmp(inPxFmt,"YUV2")==0 || strcmp(inPxFmt,"YVU2")==0) diff --git a/winsource/winsource.cpp b/winsource/winsource.cpp index 788dacd..c55386f 100644 --- a/winsource/winsource.cpp +++ b/winsource/winsource.cpp @@ -550,6 +550,7 @@ HRESULT CCameraStream::FillBuffer(IMediaSample *pms) float elapseTxMs = elapseTx.LowPart / 10000.f; int frameChanged = 0; + //Initialise test frame if(this->currentFrame == NULL) { @@ -560,7 +561,7 @@ HRESULT CCameraStream::FillBuffer(IMediaSample *pms) for(LONG y=0; y < height; y++) for(LONG x=0; x < width; x++) { - if(cursor > this->currentFrameLen) continue; + if(cursor >= this->currentFrameLen) continue; this->currentFrame[cursor] = x % 255; //Blue this->currentFrame[cursor+1] = y % 255; //Green From 75198f73554a23b563d0d2a8a9f22244ac87507a Mon Sep 17 00:00:00 2001 From: TimSC Date: Mon, 2 Dec 2013 20:40:18 +0000 Subject: [PATCH 171/256] Add placeholder for resize function --- namedpipeout.cpp | 46 +++++++++++++++++++++++++++++++--------------- namedpipeout.h | 1 + pixfmt.cpp | 16 ++++++++++++---- pixfmt.h | 6 ++++++ 4 files changed, 50 insertions(+), 19 deletions(-) diff --git a/namedpipeout.cpp b/namedpipeout.cpp index 70fbff6..0af49e5 100644 --- a/namedpipeout.cpp +++ b/namedpipeout.cpp @@ -205,15 +205,19 @@ VOID GetAnswerToRequest(char *pReply, LPDWORD pchBytes, class InstanceConfig &in unsigned char *imgPix = (unsigned char *)&pReply[8]; parent->Lock(); - unsigned bytesToCopy = instanceConfig.frameLen; - //cout << bytesToCopy << "\t" << parent->currentFrameLen << endl; - if(bytesToCopy > parent->currentFrameLen) - bytesToCopy = parent->currentFrameLen; - memcpy(imgPix, parent->currentFrame, bytesToCopy); + //Copy and resize frame if necessary (and invert y) + ResizeRgb24Image(parent->currentFrame, 
parent->currentFrameLen, + parent->currentFrameWidth, + parent->currentFrameHeight, + imgPix, + instanceConfig.frameLen, + instanceConfig.width, instanceConfig.height, 1); + + //memcpy(imgPix, parent->currentFrame, bytesToCopy); parent->UnLock(); - *pchBytes = 8 + bytesToCopy; + *pchBytes = 8 + instanceConfig.frameLen; } else { @@ -281,6 +285,8 @@ NamedPipeOut::NamedPipeOut(const char *devName) : Base_Video_Out() currentFrameAlloc = 0; currentFrameLen = 0; currentFrame = NULL; + currentFrameWidth = 0; + currentFrameHeight = 0; InitializeCriticalSection(&lock); } @@ -304,17 +310,27 @@ void NamedPipeOut::SendFrame(const char *imgIn, unsigned imgLen, const char *pxF &bgrBuff, &bgrBuffLen); - this->Lock(); - if(bgrBuffLen > this->currentFrameAlloc || this->currentFrame == NULL) + if(ret>0) { - delete [] this->currentFrame; - this->currentFrame = new unsigned char [bgrBuffLen]; - this->currentFrameAlloc = bgrBuffLen; - } + this->Lock(); + if(bgrBuffLen > this->currentFrameAlloc || this->currentFrame == NULL) + { + delete [] this->currentFrame; + this->currentFrame = new unsigned char [bgrBuffLen]; + this->currentFrameAlloc = bgrBuffLen; + } + + memcpy(this->currentFrame, bgrBuff, bgrBuffLen); + this->currentFrameWidth = width; + this->currentFrameHeight = height; - memcpy(this->currentFrame, bgrBuff, bgrBuffLen); - this->currentFrameLen = bgrBuffLen; - this->UnLock(); + this->currentFrameLen = bgrBuffLen; + this->UnLock(); + } + else + { + throw std::runtime_error("Cannot convert pixel format to BGR24"); + } } void NamedPipeOut::Stop() diff --git a/namedpipeout.h b/namedpipeout.h index b7026f7..665112b 100644 --- a/namedpipeout.h +++ b/namedpipeout.h @@ -25,6 +25,7 @@ class NamedPipeOut : public Base_Video_Out unsigned char *currentFrame; unsigned currentFrameAlloc; unsigned currentFrameLen; + unsigned currentFrameWidth, currentFrameHeight; void Lock(); void UnLock(); diff --git a/pixfmt.cpp b/pixfmt.cpp index 45b2a10..0b7c8ec 100644 --- a/pixfmt.cpp +++ b/pixfmt.cpp @@ -588,9 +588,17 @@ int DecodeFrame(const unsigned char *data, unsigned dataLen, return 0; } - // ********************************************************* - - - +int ResizeRgb24Image(const unsigned char *data, unsigned dataLen, + int widthIn, int heightIn, + unsigned char *buffOut, + unsigned buffOutLen, + int widthOut, int heightOut, int invertVertical) +{ + unsigned bytesToCopy = dataLen; + if(bytesToCopy > buffOutLen) + bytesToCopy > buffOutLen; + memcpy(buffOut, data, bytesToCopy); + return 1; +} diff --git a/pixfmt.h b/pixfmt.h index f134f96..8e7ec32 100644 --- a/pixfmt.h +++ b/pixfmt.h @@ -13,5 +13,11 @@ int DecodeFrame(const unsigned char *data, unsigned dataLen, int InsertHuffmanTableCTypes(const unsigned char* inBufferPtr, unsigned inBufferLen, std::string &outBuffer); +int ResizeRgb24Image(const unsigned char *data, unsigned dataLen, + int widthIn, int heightIn, + unsigned char *buffOut, + unsigned buffOutLen, + int widthOut, int heightOut, int invertVertical); + #endif //_PIXFMT_H_ From cd91ecb5e3f65d72b10a1ca3326f68ab3d23d362 Mon Sep 17 00:00:00 2001 From: TimSC Date: Mon, 2 Dec 2013 22:59:18 +0000 Subject: [PATCH 172/256] Crop image to fit buffer --- pixfmt.cpp | 26 ++++++++++++++++++++++---- 1 file changed, 22 insertions(+), 4 deletions(-) diff --git a/pixfmt.cpp b/pixfmt.cpp index 0b7c8ec..980fa5d 100644 --- a/pixfmt.cpp +++ b/pixfmt.cpp @@ -596,9 +596,27 @@ int ResizeRgb24Image(const unsigned char *data, unsigned dataLen, unsigned buffOutLen, int widthOut, int heightOut, int invertVertical) { - unsigned 
bytesToCopy = dataLen; - if(bytesToCopy > buffOutLen) - bytesToCopy > buffOutLen; - memcpy(buffOut, data, bytesToCopy); + //Simple crop of image to target buffer + for(int x = 0; x < widthOut; x++) + { + for(int y = 0; y < heightOut; y++) + { + unsigned outOffset = x*3 + (y*3*widthOut); + if(outOffset + 3 >= buffOutLen) continue; + unsigned char *outPx = &buffOut[outOffset]; + + int row = y; + if(invertVertical) row = heightIn - y - 1; + unsigned inOffset = x*3 + (row*3*widthIn); + if(inOffset + 3 >= dataLen) continue; + const unsigned char *inPx = &data[inOffset]; + + outPx[0] = inPx[0]; + outPx[1] = inPx[1]; + outPx[2] = inPx[2]; + } + + } + return 1; } From 3a8f1ac86f57ca497eb58f94e725e2bff9b05914 Mon Sep 17 00:00:00 2001 From: TimSC Date: Mon, 2 Dec 2013 23:04:23 +0000 Subject: [PATCH 173/256] Nearest neighbour image resize --- namedpipeout.cpp | 2 +- pixfmt.cpp | 43 +++++++++++++++++++++++++++++++++++++++++-- pixfmt.h | 2 +- 3 files changed, 43 insertions(+), 4 deletions(-) diff --git a/namedpipeout.cpp b/namedpipeout.cpp index 0af49e5..d0cc547 100644 --- a/namedpipeout.cpp +++ b/namedpipeout.cpp @@ -207,7 +207,7 @@ VOID GetAnswerToRequest(char *pReply, LPDWORD pchBytes, class InstanceConfig &in parent->Lock(); //Copy and resize frame if necessary (and invert y) - ResizeRgb24Image(parent->currentFrame, parent->currentFrameLen, + ResizeRgb24ImageNN(parent->currentFrame, parent->currentFrameLen, parent->currentFrameWidth, parent->currentFrameHeight, imgPix, diff --git a/pixfmt.cpp b/pixfmt.cpp index 980fa5d..82b5d41 100644 --- a/pixfmt.cpp +++ b/pixfmt.cpp @@ -590,7 +590,46 @@ int DecodeFrame(const unsigned char *data, unsigned dataLen, } // ********************************************************* -int ResizeRgb24Image(const unsigned char *data, unsigned dataLen, +int ResizeRgb24ImageNN(const unsigned char *data, unsigned dataLen, + int widthIn, int heightIn, + unsigned char *buffOut, + unsigned buffOutLen, + int widthOut, int heightOut, int invertVertical) +{ + //Simple crop of image to target buffer + for(int x = 0; x < widthOut; x++) + { + for(int y = 0; y < heightOut; y++) + { + unsigned outOffset = x*3 + (y*3*widthOut); + if(outOffset + 3 >= buffOutLen) continue; + unsigned char *outPx = &buffOut[outOffset]; + + //Scale position + double inx = (double)x * (double)widthIn / (double)widthOut; + double iny = (double)y * (double)heightIn / (double)heightOut; + + //Round to nearest pixel + int inxi = (int)(inx+0.5); + int inyi = (int)(iny+0.5); + + int row = inyi; + if(invertVertical) row = heightIn - inyi - 1; + unsigned inOffset = inxi*3 + (row*3*widthIn); + if(inOffset + 3 >= dataLen) continue; + const unsigned char *inPx = &data[inOffset]; + + outPx[0] = inPx[0]; + outPx[1] = inPx[1]; + outPx[2] = inPx[2]; + } + + } + + return 1; +} + +int CropToFitRgb24Image(const unsigned char *data, unsigned dataLen, int widthIn, int heightIn, unsigned char *buffOut, unsigned buffOutLen, @@ -619,4 +658,4 @@ int ResizeRgb24Image(const unsigned char *data, unsigned dataLen, } return 1; -} +} \ No newline at end of file diff --git a/pixfmt.h b/pixfmt.h index 8e7ec32..ffebee3 100644 --- a/pixfmt.h +++ b/pixfmt.h @@ -13,7 +13,7 @@ int DecodeFrame(const unsigned char *data, unsigned dataLen, int InsertHuffmanTableCTypes(const unsigned char* inBufferPtr, unsigned inBufferLen, std::string &outBuffer); -int ResizeRgb24Image(const unsigned char *data, unsigned dataLen, +int ResizeRgb24ImageNN(const unsigned char *data, unsigned dataLen, int widthIn, int heightIn, unsigned char *buffOut, unsigned 
buffOutLen, From 5422099189d2779ff2a382b44cf8aaacacc11f93 Mon Sep 17 00:00:00 2001 From: TimSC Date: Tue, 3 Dec 2013 18:07:09 +0000 Subject: [PATCH 174/256] Reduce buffers to reduce latency --- mfvideoin.cpp | 36 +++++++++++++++++++++++++++++++++--- 1 file changed, 33 insertions(+), 3 deletions(-) diff --git a/mfvideoin.cpp b/mfvideoin.cpp index c92681b..e68ba00 100644 --- a/mfvideoin.cpp +++ b/mfvideoin.cpp @@ -277,6 +277,7 @@ class SourceReaderCB : public IMFSourceReaderCallback CRITICAL_SECTION lock; int framePending; unsigned int maxNumFrames; + unsigned int droppedFrames; vector frameBuff; vector frameLenBuff; @@ -290,7 +291,8 @@ class SourceReaderCB : public IMFSourceReaderCallback m_nRefCount = 0; framePending = 0; InitializeCriticalSection(&lock); - maxNumFrames = 10; + maxNumFrames = 1; + droppedFrames = 0; } virtual ~SourceReaderCB() @@ -310,29 +312,57 @@ class SourceReaderCB : public IMFSourceReaderCallback return QISearch(this, qit, iid, ppv); } + void CheckForBufferOverflow() + { + //The lock should already be in use + while(this->frameBuff.size() > this->maxNumFrames) + { + //Drop an old frame if buffer is starting to overflow + frameBuff.erase(frameBuff.begin()); + frameLenBuff.erase(frameLenBuff.begin()); + hrStatusBuff.erase(hrStatusBuff.begin()); + dwStreamIndexBuff.erase(dwStreamIndexBuff.begin()); + dwStreamFlagsBuff.erase(dwStreamFlagsBuff.begin()); + llTimestampBuff.erase(llTimestampBuff.begin()); + droppedFrames ++; + } + } + + void SetMaxBufferSize(unsigned maxBuffSizeIn) + { + EnterCriticalSection(&lock); + this->maxNumFrames = maxBuffSizeIn; + this->CheckForBufferOverflow(); + LeaveCriticalSection(&lock); + } + STDMETHODIMP OnReadSample(HRESULT hrStatus, DWORD dwStreamIndex, DWORD dwStreamFlags, LONGLONG llTimestamp, IMFSample *pSample) { //cout << "OnReadSample: " << llTimestamp << endl; EnterCriticalSection(&lock); - if (pSample && this->frameBuff.size() < this->maxNumFrames) + if (pSample) { char *buff = NULL; DWORD buffLen = SampleToStaticObj(pSample, &buff); //cout << (long) buff << "," << buffLen << endl; //if(buff!=NULL) delete [] buff; + //Always add frame to buffer frameBuff.push_back(buff); frameLenBuff.push_back(buffLen); hrStatusBuff.push_back(hrStatus); dwStreamIndexBuff.push_back(dwStreamIndex); dwStreamFlagsBuff.push_back(dwStreamFlags); llTimestampBuff.push_back(llTimestamp); + + this->CheckForBufferOverflow(); } this->framePending = 0; LeaveCriticalSection(&lock); + return S_OK; } @@ -460,7 +490,7 @@ MfVideoIn::MfVideoIn(const wchar_t *devNameIn) : WmfBase() this->startDevFlag = 0; this->stopDevFlag = 0; this->closeDevFlag = 0; - this->maxBuffSize = 10; + this->maxBuffSize = 1; InitializeCriticalSection(&lock); } From 358b82127de0411878830eea34eb5f6d439dbd47 Mon Sep 17 00:00:00 2001 From: Tim Sheerman-Chase Date: Fri, 13 Dec 2013 17:00:04 +0000 Subject: [PATCH 175/256] Check if camera supports time per frame --- v4l2capture.cpp | 24 ++++++++++++++++++++++++ 1 file changed, 24 insertions(+) diff --git a/v4l2capture.cpp b/v4l2capture.cpp index 07e076b..cbaca68 100644 --- a/v4l2capture.cpp +++ b/v4l2capture.cpp @@ -15,6 +15,7 @@ #include #include #include +#include #include "pixfmt.h" @@ -416,6 +417,29 @@ int Video_in_Manager::StartDeviceInternal(int buffer_count = 10) if(!ret) throw std::runtime_error("Could not determine image format"); } + struct v4l2_streamparm streamparm; + memset (&streamparm, 0, sizeof (streamparm)); + streamparm.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + + //Check if camera supports timeperframe + if(my_ioctl(this->fd, 
VIDIOC_G_PARM, &streamparm)) + { + throw std::runtime_error("VIDIOC_G_PARM failed"); + } + int timePerFrameSupported = (V4L2_CAP_TIMEPERFRAME & streamparm.parm.capture.capability) != 0; + + if(timePerFrameSupported) + { + /*struct v4l2_fract *tpf = &streamparm.parm.capture.timeperframe; + tpf->numerator = 1; + tpf->denominator = 25; + if(my_ioctl(this->fd, VIDIOC_S_PARM, &streamparm)) + { + throw std::runtime_error("VIDIOC_S_PARM failed"); + }*/ + + } + struct v4l2_requestbuffers reqbuf; reqbuf.count = buffer_count; reqbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; From d81b497587eb1a1612b334433080580af608a25c Mon Sep 17 00:00:00 2001 From: Tim Sheerman-Chase Date: Fri, 13 Dec 2013 18:20:55 +0000 Subject: [PATCH 176/256] Problems getting exposure to manual mode, each has quirks --- v4l2capture.cpp | 109 +++++++++++++++++++++++++++++++++++++----------- v4l2capture.h | 2 + 2 files changed, 87 insertions(+), 24 deletions(-) diff --git a/v4l2capture.cpp b/v4l2capture.cpp index cbaca68..18dfe7e 100644 --- a/v4l2capture.cpp +++ b/v4l2capture.cpp @@ -96,6 +96,25 @@ std::wstring CharArrayToWString(const char *in) return tmpDevName2; } +static void enumerate_menu (int fd, struct v4l2_queryctrl &queryctrl) +{ + struct v4l2_querymenu querymenu; + std::cout << " Menu items:" << std::endl; + + memset (&querymenu, 0, sizeof (querymenu)); + querymenu.id = queryctrl.id; + + for (querymenu.index = queryctrl.minimum; + querymenu.index <= queryctrl.maximum; + querymenu.index++) { + if (0 == my_ioctl (fd, VIDIOC_QUERYMENU, &querymenu)) { + std::cout << " " << querymenu.index << " " << querymenu.name << std::endl; + } else { + std::cout << " Error VIDIOC_QUERYMENU" << std::endl; + } + } +} + // ************************************************************************** Video_in_Manager::Video_in_Manager(const char *devNameIn) : Base_Video_In() @@ -304,6 +323,69 @@ int Video_in_Manager::OpenDeviceInternal() return 1; } +void Video_in_Manager::Test() +{ + /*struct v4l2_streamparm streamparm; + memset (&streamparm, 0, sizeof (streamparm)); + streamparm.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + + //Check if camera supports timeperframe + if(my_ioctl(this->fd, VIDIOC_G_PARM, &streamparm)) + { + throw std::runtime_error("VIDIOC_G_PARM failed"); + } + int timePerFrameSupported = (V4L2_CAP_TIMEPERFRAME & streamparm.parm.capture.capability) != 0; + if(timePerFrameSupported) + { + + //Enurate framerates + //struct v4l2_frmivalenum frmrates; + //memset (&frmrates, 0, sizeof (v4l2_frmivalenum)); + //my_ioctl(this->fd, VIDIOC_ENUM_FRAMEINTERVALS, &frmrates); + //std::cout << "fr " << frmrates.discrete.numerator << "," << frmrates.discrete.denominator << std::endl; + + //Set frame rate + struct v4l2_fract *tpf = &streamparm.parm.capture.timeperframe; + tpf->numerator = 1; + tpf->denominator = 30; + if(my_ioctl(this->fd, VIDIOC_S_PARM, &streamparm)) + { + throw std::runtime_error("VIDIOC_S_PARM failed"); + } + + } + + //Query controls + struct v4l2_queryctrl queryctrl; + queryctrl.id = V4L2_CID_EXPOSURE_AUTO; + my_ioctl (this->fd, VIDIOC_QUERYCTRL, &queryctrl); + if (!(queryctrl.flags & V4L2_CTRL_FLAG_DISABLED)) + { + std::cout << "Control "<fd, queryctrl); + } +*/ +/* //Read control + struct v4l2_control control; + memset (&control, 0, sizeof (control)); + control.id = V4L2_CID_EXPOSURE_AUTO; + my_ioctl (fd, VIDIOC_QUERYCTRL, &control); + std::cout << "val1 " << control.value << std::endl;*/ +/* + //Set control + memset (&control, 0, sizeof (control)); + control.id = V4L2_CID_EXPOSURE_AUTO; + control.value = V4L2_EXPOSURE_MANUAL; + 
std::cout << "ret " << my_ioctl (fd, VIDIOC_S_CTRL, &control) << std::endl; + + //Confirm value + memset (&control, 0, sizeof (control)); + control.id = V4L2_CID_EXPOSURE_AUTO; + my_ioctl (fd, VIDIOC_QUERYCTRL, &control); + std::cout << "val2 " << control.value << std::endl;*/ +} + int Video_in_Manager::SetFormatInternal(class SetFormatParams &args) { if(verbose) printf("SetFormatInternal\n"); @@ -417,29 +499,6 @@ int Video_in_Manager::StartDeviceInternal(int buffer_count = 10) if(!ret) throw std::runtime_error("Could not determine image format"); } - struct v4l2_streamparm streamparm; - memset (&streamparm, 0, sizeof (streamparm)); - streamparm.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; - - //Check if camera supports timeperframe - if(my_ioctl(this->fd, VIDIOC_G_PARM, &streamparm)) - { - throw std::runtime_error("VIDIOC_G_PARM failed"); - } - int timePerFrameSupported = (V4L2_CAP_TIMEPERFRAME & streamparm.parm.capture.capability) != 0; - - if(timePerFrameSupported) - { - /*struct v4l2_fract *tpf = &streamparm.parm.capture.timeperframe; - tpf->numerator = 1; - tpf->denominator = 25; - if(my_ioctl(this->fd, VIDIOC_S_PARM, &streamparm)) - { - throw std::runtime_error("VIDIOC_S_PARM failed"); - }*/ - - } - struct v4l2_requestbuffers reqbuf; reqbuf.count = buffer_count; reqbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; @@ -511,8 +570,10 @@ int Video_in_Manager::StartDeviceInternal(int buffer_count = 10) throw std::runtime_error("VIDIOC_STREAMON failed"); } + this->Test(); + this->deviceStarted = 1; - if(verbose) printf("Started ok\n"); + if(verbose) printf("Started ok\n"); return 1; } diff --git a/v4l2capture.h b/v4l2capture.h index d87ab3c..3f35477 100644 --- a/v4l2capture.h +++ b/v4l2capture.h @@ -109,6 +109,8 @@ class Video_in_Manager : public Base_Video_In void CloseDevice(); int GetFrame(unsigned char **buffOut, class FrameMetaData *metaOut); + void Test(); + protected: int ReadFrame(); int OpenDeviceInternal(); From 6f473d2ce4fcf69e39c371c6ab9560f3a5c41de1 Mon Sep 17 00:00:00 2001 From: TimSC Date: Mon, 16 Dec 2013 17:40:10 +0000 Subject: [PATCH 177/256] Add video file writer to python api --- libvideolive.cpp | 37 +++++++++++- mfvideooutfile.cpp | 67 +++++++++++++++++++++ mfvideooutfile.h | 29 +++++++++ setup.py | 6 +- videooutfile.cpp | 146 +++++++++++++++++++++++++++++++++++++++++++++ videooutfile.h | 28 +++++++++ videooutfile.py | 25 ++++++++ 7 files changed, 333 insertions(+), 5 deletions(-) create mode 100644 mfvideooutfile.cpp create mode 100644 mfvideooutfile.h create mode 100644 videooutfile.cpp create mode 100644 videooutfile.h create mode 100644 videooutfile.py diff --git a/libvideolive.cpp b/libvideolive.cpp index 95349fe..7a17662 100644 --- a/libvideolive.cpp +++ b/libvideolive.cpp @@ -17,6 +17,7 @@ #include "pixfmt.h" #include "videoout.h" #include "videoin.h" +#include "videooutfile.h" // ********************************************************************* @@ -116,7 +117,7 @@ static PyMethodDef Video_out_manager_methods[] = { static PyTypeObject Video_out_manager_type = { PyObject_HEAD_INIT(NULL) - 0, "v4l2capture.Video_out_manager", sizeof(Video_out_manager), 0, + 0, "v4l2capture.Video_out_stream_manager", sizeof(Video_out_manager), 0, (destructor)Video_out_manager_dealloc, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, Py_TPFLAGS_DEFAULT, "Video_out_manager(path)\n\nOpens the video device at " "the given path and returns an object that can capture images. 
The " @@ -125,6 +126,30 @@ static PyTypeObject Video_out_manager_type = { (initproc)Video_out_manager_init }; +static PyMethodDef Video_out_file_manager_methods[] = { + {"open", (PyCFunction)Video_out_file_manager_open, METH_VARARGS, + "open(filename = '\\dev\\video0', pixel_format, width, height)\n\n" + "Open video output."}, + {"send_frame", (PyCFunction)Video_out_file_manager_Send_frame, METH_VARARGS, + "send_frame(dev = '\\dev\\video0', img, pixel_format, width, height)\n\n" + "Send frame to video stream output."}, + {"close", (PyCFunction)Video_out_file_manager_close, METH_VARARGS, + "close(dev = '\\dev\\video0')\n\n" + "Close video device. Subsequent calls to other methods will fail."}, + {NULL} +}; + +static PyTypeObject Video_out_file_manager_type = { + PyObject_HEAD_INIT(NULL) + 0, "v4l2capture.Video_out_file_manager", sizeof(Video_out_manager), 0, + (destructor)Video_out_file_manager_dealloc, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, Py_TPFLAGS_DEFAULT, "Video_out_manager(path)\n\nOpens the video device at " + "the given path and returns an object that can capture images. The " + "constructor and all methods except close may raise IOError.", 0, 0, 0, + 0, 0, 0, Video_out_file_manager_methods, 0, 0, 0, 0, 0, 0, 0, + (initproc)Video_out_file_manager_init +}; + // ********************************************************************* static PyMethodDef module_methods[] = { @@ -136,6 +161,7 @@ PyMODINIT_FUNC initvideolive(void) { Device_manager_type.tp_new = PyType_GenericNew; Video_out_manager_type.tp_new = PyType_GenericNew; + Video_out_file_manager_type.tp_new = PyType_GenericNew; if(PyType_Ready(&Device_manager_type) < 0) { @@ -145,6 +171,10 @@ PyMODINIT_FUNC initvideolive(void) { return; } + if(PyType_Ready(&Video_out_file_manager_type) < 0) + { + return; + } PyObject *module = Py_InitModule3("videolive", module_methods, "Capture and stream video."); @@ -155,7 +185,8 @@ PyMODINIT_FUNC initvideolive(void) } Py_INCREF(&Device_manager_type); - PyModule_AddObject(module, "Video_in_manager", (PyObject *)&Device_manager_type); - PyModule_AddObject(module, "Video_out_manager", (PyObject *)&Video_out_manager_type); + PyModule_AddObject(module, "Video_in_stream_manager", (PyObject *)&Device_manager_type); + PyModule_AddObject(module, "Video_out_stream_manager", (PyObject *)&Video_out_manager_type); + PyModule_AddObject(module, "Video_out_file_manager", (PyObject *)&Video_out_file_manager_type); } diff --git a/mfvideooutfile.cpp b/mfvideooutfile.cpp new file mode 100644 index 0000000..7eeb915 --- /dev/null +++ b/mfvideooutfile.cpp @@ -0,0 +1,67 @@ + +#include "mfvideooutfile.h" +#include +#include + +MfVideoOutFile::MfVideoOutFile(const char *devName) : Base_Video_Out() +{ + HRESULT hr = MFStartup(MF_VERSION); + if(!SUCCEEDED(hr)) + throw std::runtime_error("Media foundation startup failed"); + + hr = CoInitializeEx(NULL, COINIT_APARTMENTTHREADED); + if(hr == RPC_E_CHANGED_MODE) + throw std::runtime_error("CoInitializeEx failed"); + + + + + +} + +MfVideoOutFile::~MfVideoOutFile() +{ + MFShutdown(); + + CoUninitialize(); +} + +void MfVideoOutFile::SendFrame(const char *imgIn, unsigned imgLen, const char *pxFmt, int width, int height) +{ + +} + +void MfVideoOutFile::Stop() +{ + +} + +int MfVideoOutFile::WaitForStop() +{ + return 1; +} + +void MfVideoOutFile::SetOutputSize(int width, int height) +{ + +} + +void MfVideoOutFile::SetOutputPxFmt(const char *fmt) +{ + +} + +void MfVideoOutFile::Run() +{ + +} + +//******************************************************************************* + 
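//The function below is the pthread entry point used by videooutfile.cpp: the
//MfVideoOutFile instance is passed through pthread_create() as a void pointer,
//cast back here, and its Run() method is executed on the worker thread.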
+void *MfVideoOut_File_Worker_thread(void *arg) +{ + class MfVideoOutFile *argobj = (class MfVideoOutFile*) arg; + argobj->Run(); + + return NULL; +} diff --git a/mfvideooutfile.h b/mfvideooutfile.h new file mode 100644 index 0000000..f0b7f35 --- /dev/null +++ b/mfvideooutfile.h @@ -0,0 +1,29 @@ + +#ifndef MFVIDEOOUT_H +#define MFVIDEOOUT_H + +#include +#include +#include "base.h" + +class MfVideoOutFile : public Base_Video_Out +{ +public: + MfVideoOutFile(const char *devName); + virtual ~MfVideoOutFile(); + + void SendFrame(const char *imgIn, unsigned imgLen, const char *pxFmt, int width, int height); + void Stop(); + int WaitForStop(); + + virtual void SetOutputSize(int width, int height); + virtual void SetOutputPxFmt(const char *fmt); + + void Run(); + +}; + +void *MfVideoOut_File_Worker_thread(void *arg); + +#endif //MFVIDEOOUT_H + diff --git a/setup.py b/setup.py index 6b54ece..7362782 100755 --- a/setup.py +++ b/setup.py @@ -23,7 +23,8 @@ c_args=[] l_args=["/MANIFEST"] - videolive = Extension("videolive", ["pixfmt.cpp", "libvideolive.cpp", "videoout.cpp", "videoin.cpp", "mfvideoin.cpp", "namedpipeout.cpp"], + videolive = Extension("videolive", ["pixfmt.cpp", "libvideolive.cpp", "videoout.cpp", "videoin.cpp", "mfvideoin.cpp", "namedpipeout.cpp", + "videooutfile.cpp", "mfvideooutfile.cpp"], define_macros=[('_'+os.name.upper(), None)], library_dirs=['C:\Dev\Lib\libjpeg-turbo-win\lib', "C:\Dev\Lib\pthreads\pthreads.2"], include_dirs=['C:\Dev\Lib\libjpeg-turbo-win\include', "C:\Dev\Lib\pthreads\pthreads.2"], @@ -32,7 +33,8 @@ libraries = ["pthreadVC2", "jpeg", "Mfplat", "Mf", "Mfreadwrite", "Ole32", "mfuuid", "Shlwapi"]) else: - videolive = Extension("videolive", ["v4l2capture.cpp", "v4l2out.cpp", "pixfmt.cpp", "libvideolive.cpp", "videoout.cpp", "videoin.cpp"], + videolive = Extension("videolive", ["v4l2capture.cpp", "v4l2out.cpp", "pixfmt.cpp", "libvideolive.cpp", "videoout.cpp", "videoin.cpp", + "videooutfile.cpp"], define_macros=[('_'+os.name.upper(), None)], libraries = ["v4l2", "pthread", "jpeg"]) diff --git a/videooutfile.cpp b/videooutfile.cpp new file mode 100644 index 0000000..3059a67 --- /dev/null +++ b/videooutfile.cpp @@ -0,0 +1,146 @@ + +#include +#include +#include +#include "videooutfile.h" +#ifdef _NT +#include "mfvideooutfile.h" +#endif +#if _POSIX +//TODO +#endif + +int Video_out_file_manager_init(Video_out_file_manager *self, PyObject *args, + PyObject *kwargs) +{ + self->threads = new std::map; + return 0; +} + +void Video_out_file_manager_dealloc(Video_out_file_manager *self) +{ + //Stop high level threads + for(std::map::iterator it = self->threads->begin(); + it != self->threads->end(); it++) + { + it->second->Stop(); + it->second->WaitForStop(); + } + + delete self->threads; + self->threads = NULL; + self->ob_type->tp_free((PyObject *)self); +} + +PyObject *Video_out_file_manager_open(Video_out_file_manager *self, PyObject *args) +{ + std::cout << "Video_out_manager_open" << std::endl; + + //Process arguments + const char *devarg = NULL; + const char *pxFmtIn = NULL; + int widthIn = 0; + int heightIn = 0; + + if(!PyArg_ParseTuple(args, "ssii", &devarg, &pxFmtIn, &widthIn, &heightIn)) + { + std::cout << "err" << std::endl; + PyErr_Format(PyExc_RuntimeError, "Incorrect arguments to function."); + Py_RETURN_NONE; + } + + //Create worker thread + pthread_t thread; + #ifdef _POSIX + //TODO + #endif + #ifdef _NT + MfVideoOutFile *threadArgs = new MfVideoOutFile(devarg); + #endif + + #ifdef _NT //TODO Remove ifdef when POSIX approah is established + 
(*self->threads)[devarg] = threadArgs; + threadArgs->SetOutputSize(widthIn, heightIn); + threadArgs->SetOutputPxFmt(pxFmtIn); + #endif + + #ifdef _POSIX + //TODO + #endif + #ifdef _NT + pthread_create(&thread, NULL, MfVideoOut_File_Worker_thread, threadArgs); + #endif + + Py_RETURN_NONE; +} + +PyObject *Video_out_file_manager_Send_frame(Video_out_file_manager *self, PyObject *args) +{ + //printf("Video_out_manager_Send_frame\n"); + //dev = '\\dev\\video0', img, pixel_format, width, height + + //Process arguments + const char *devarg = NULL; + const char *imgIn = NULL; + const char *pxFmtIn = NULL; + int widthIn = 0; + int heightIn = 0; + + if(PyObject_Length(args) < 5) + { + PyErr_Format(PyExc_RuntimeError, "Too few arguments."); + Py_RETURN_NONE; + } + + PyObject *pydev = PyTuple_GetItem(args, 0); + devarg = PyString_AsString(pydev); + + PyObject *pyimg = PyTuple_GetItem(args, 1); + imgIn = PyString_AsString(pyimg); + Py_ssize_t imgLen = PyObject_Length(pyimg); + + PyObject *pyPxFmt = PyTuple_GetItem(args, 2); + pxFmtIn = PyString_AsString(pyPxFmt); + + PyObject *pyWidth = PyTuple_GetItem(args, 3); + widthIn = PyInt_AsLong(pyWidth); + + PyObject *pyHeight = PyTuple_GetItem(args, 4); + heightIn = PyInt_AsLong(pyHeight); + + std::map::iterator it = self->threads->find(devarg); + + if(it != self->threads->end()) + { + it->second->SendFrame(imgIn, imgLen, pxFmtIn, widthIn, heightIn); + } + else + { + PyErr_Format(PyExc_RuntimeError, "Device not found."); + Py_RETURN_NONE; + } + + Py_RETURN_NONE; +} + +PyObject *Video_out_file_manager_close(Video_out_file_manager *self, PyObject *args) +{ + //Process arguments + const char *devarg = "/dev/video0"; + if(PyTuple_Size(args) >= 1) + { + PyObject *pydevarg = PyTuple_GetItem(args, 0); + devarg = PyString_AsString(pydevarg); + } + + //Stop worker thread + std::map::iterator it = self->threads->find(devarg); + + if(it != self->threads->end()) + { + it->second->Stop(); + } + + Py_RETURN_NONE; +} + diff --git a/videooutfile.h b/videooutfile.h new file mode 100644 index 0000000..2de4919 --- /dev/null +++ b/videooutfile.h @@ -0,0 +1,28 @@ + +#ifndef VIDEOOUTFILE_H +#define VIDEOOUTFILE_H + +#include +#include +#include +#include "base.h" + +class Video_out_file_manager_cl{ +public: + PyObject_HEAD + std::map *threads; +}; +typedef Video_out_file_manager_cl Video_out_file_manager; + +int Video_out_file_manager_init(Video_out_file_manager *self, PyObject *args, + PyObject *kwargs); + +void Video_out_file_manager_dealloc(Video_out_file_manager *self); + +PyObject *Video_out_file_manager_open(Video_out_file_manager *self, PyObject *args); +PyObject *Video_out_file_manager_Send_frame(Video_out_file_manager *self, PyObject *args); +PyObject *Video_out_file_manager_close(Video_out_file_manager *self, PyObject *args); + + +#endif //VIDEOOUTFILE_H + diff --git a/videooutfile.py b/videooutfile.py new file mode 100644 index 0000000..b565aaa --- /dev/null +++ b/videooutfile.py @@ -0,0 +1,25 @@ + +import videolive, time +import numpy as np + +if __name__=="__main__": + outManager = videolive.Video_out_file_manager() + print outManager + + outManager.open("test.wmv", "RGB24", 640, 480) + + imgLen = 640 * 480 * 3 + img = np.zeros(shape=(imgLen,), dtype=np.uint8) + + for i in range(imgLen): + if (i % 500) > 250: + img[i] = np.random.randint(0, 255) + else: + img[i] = 128 + + for i in range(100): + print "Frame", i + outManager.send_frame("test.wmv", str(img.tostring()), "RGB24", 640, 480) + + time.sleep(0.1) + From 165acbb3d86c2a81c2259e67afb5c1154a520de9 Mon Sep 17 
00:00:00 2001 From: TimSC Date: Mon, 16 Dec 2013 18:28:44 +0000 Subject: [PATCH 178/256] Adapt video writer example to oop design --- mfvideooutfile.cpp | 193 +++++++++++++++++++++++++++++++++++++++++++++ mfvideooutfile.h | 8 ++ setup.py | 6 +- 3 files changed, 204 insertions(+), 3 deletions(-) diff --git a/mfvideooutfile.cpp b/mfvideooutfile.cpp index 7eeb915..d91045b 100644 --- a/mfvideooutfile.cpp +++ b/mfvideooutfile.cpp @@ -1,7 +1,27 @@ #include "mfvideooutfile.h" +#include #include #include +using namespace std; + +template void SafeRelease(T **ppT) +{ + if (*ppT) + { + (*ppT)->Release(); + *ppT = NULL; + } +} + +const UINT32 VIDEO_WIDTH = 640; +const UINT32 VIDEO_HEIGHT = 480; +const UINT32 VIDEO_FPS = 25; +const UINT32 VIDEO_BIT_RATE = 800000; +const GUID VIDEO_ENCODING_FORMAT = MFVideoFormat_WMV3; +const GUID VIDEO_INPUT_FORMAT = MFVideoFormat_RGB24; +const UINT32 VIDEO_PELS = VIDEO_WIDTH * VIDEO_HEIGHT; +const UINT32 VIDEO_FRAME_COUNT = 20 * VIDEO_FPS; MfVideoOutFile::MfVideoOutFile(const char *devName) : Base_Video_Out() { @@ -13,14 +33,119 @@ MfVideoOutFile::MfVideoOutFile(const char *devName) : Base_Video_Out() if(hr == RPC_E_CHANGED_MODE) throw std::runtime_error("CoInitializeEx failed"); + this->pSinkWriter = NULL; + IMFMediaType *pMediaTypeOut = NULL; + IMFMediaType *pMediaTypeIn = NULL; + this->streamIndex = 0; + + this->rtStart = 0; + MFFrameRateToAverageTimePerFrame(VIDEO_FPS, 1, &this->rtDuration); + hr = MFCreateSinkWriterFromURL(L"output.wmv", NULL, NULL, &pSinkWriter); + // Set the output media type. + if (SUCCEEDED(hr)) + { + hr = MFCreateMediaType(&pMediaTypeOut); + } + if (SUCCEEDED(hr)) + { + hr = pMediaTypeOut->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video); + } + if (SUCCEEDED(hr)) + { + hr = pMediaTypeOut->SetGUID(MF_MT_SUBTYPE, VIDEO_ENCODING_FORMAT); + } + if (SUCCEEDED(hr)) + { + hr = pMediaTypeOut->SetUINT32(MF_MT_AVG_BITRATE, VIDEO_BIT_RATE); + } + if (SUCCEEDED(hr)) + { + hr = pMediaTypeOut->SetUINT32(MF_MT_INTERLACE_MODE, MFVideoInterlace_Progressive); + } + if (SUCCEEDED(hr)) + { + hr = MFSetAttributeSize(pMediaTypeOut, MF_MT_FRAME_SIZE, VIDEO_WIDTH, VIDEO_HEIGHT); + } + if (SUCCEEDED(hr)) + { + hr = MFSetAttributeRatio(pMediaTypeOut, MF_MT_FRAME_RATE, VIDEO_FPS, 1); + } + if (SUCCEEDED(hr)) + { + hr = MFSetAttributeRatio(pMediaTypeOut, MF_MT_PIXEL_ASPECT_RATIO, 1, 1); + } + if (SUCCEEDED(hr)) + { + hr = pSinkWriter->AddStream(pMediaTypeOut, &streamIndex); + } + + // Set the input media type. + if (SUCCEEDED(hr)) + { + hr = MFCreateMediaType(&pMediaTypeIn); + } + if (SUCCEEDED(hr)) + { + hr = pMediaTypeIn->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video); + } + + if (SUCCEEDED(hr)) + { + hr = pMediaTypeIn->SetGUID(MF_MT_SUBTYPE, VIDEO_INPUT_FORMAT); + } + if (SUCCEEDED(hr)) + { + hr = pMediaTypeIn->SetUINT32(MF_MT_INTERLACE_MODE, MFVideoInterlace_Progressive); + } + if (SUCCEEDED(hr)) + { + hr = MFSetAttributeSize(pMediaTypeIn, MF_MT_FRAME_SIZE, VIDEO_WIDTH, VIDEO_HEIGHT); + } + if (SUCCEEDED(hr)) + { + hr = MFSetAttributeRatio(pMediaTypeIn, MF_MT_FRAME_RATE, VIDEO_FPS, 1); + } + if (SUCCEEDED(hr)) + { + hr = MFSetAttributeRatio(pMediaTypeIn, MF_MT_PIXEL_ASPECT_RATIO, 1, 1); + } + if (SUCCEEDED(hr)) + { + hr = pSinkWriter->SetInputMediaType(streamIndex, pMediaTypeIn, NULL); + } + + // Tell the sink writer to start accepting data. + if (SUCCEEDED(hr)) + { + hr = pSinkWriter->BeginWriting(); + } + + // Return the pointer to the caller. 
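	//(The commented-out block below appears to come from a version that handed the
	//writer and stream index back to a caller; here they are simply kept in the
	//pSinkWriter and streamIndex members.)
	//By this point the sink writer is fully configured: compressed output type ->
	//AddStream, uncompressed input type -> SetInputMediaType, then BeginWriting
	//before any samples are written.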
+ if (SUCCEEDED(hr)) + { + /**ppWriter = pSinkWriter; + (*ppWriter)->AddRef(); + *pStreamIndex = streamIndex;*/ + } + + //SafeRelease(&pSinkWriter); + SafeRelease(&pMediaTypeOut); + SafeRelease(&pMediaTypeIn); + return; } MfVideoOutFile::~MfVideoOutFile() { + if(this->pSinkWriter != NULL) + { + HRESULT hr = this->pSinkWriter->Finalize(); + } + SafeRelease(&pSinkWriter); + MFShutdown(); CoUninitialize(); @@ -28,7 +153,74 @@ MfVideoOutFile::~MfVideoOutFile() void MfVideoOutFile::SendFrame(const char *imgIn, unsigned imgLen, const char *pxFmt, int width, int height) { + IMFSample *pSample = NULL; + IMFMediaBuffer *pBuffer = NULL; + + const LONG cbWidth = 3 * VIDEO_WIDTH; + const DWORD cbBuffer = cbWidth * VIDEO_HEIGHT; + BYTE *pData = NULL; + + // Create a new memory buffer. + HRESULT hr = MFCreateMemoryBuffer(cbBuffer, &pBuffer); + + // Lock the buffer and copy the video frame to the buffer. + if (SUCCEEDED(hr)) + { + hr = pBuffer->Lock(&pData, NULL, NULL); + } + if (SUCCEEDED(hr)) + { + hr = MFCopyImage( + pData, // Destination buffer. + cbWidth, // Destination stride. + (BYTE*)imgIn, // First row in source image. + cbWidth, // Source stride. + cbWidth, // Image width in bytes. + VIDEO_HEIGHT // Image height in pixels. + ); + } + if (pBuffer) + { + pBuffer->Unlock(); + } + + // Set the data length of the buffer. + if (SUCCEEDED(hr)) + { + hr = pBuffer->SetCurrentLength(cbBuffer); + } + + // Create a media sample and add the buffer to the sample. + if (SUCCEEDED(hr)) + { + hr = MFCreateSample(&pSample); + } + if (SUCCEEDED(hr)) + { + hr = pSample->AddBuffer(pBuffer); + } + + // Set the time stamp and the duration. + if (SUCCEEDED(hr)) + { + hr = pSample->SetSampleTime(rtStart); + } + if (SUCCEEDED(hr)) + { + hr = pSample->SetSampleDuration(rtDuration); + } + + // Send the sample to the Sink Writer. 
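	//WriteSample() queues the sample for encoding; the caller still holds its own
	//COM references to pSample and pBuffer, which is why both are released with
	//SafeRelease() below once the call returns.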
+ if (SUCCEEDED(hr) && this->pSinkWriter != NULL) + { + hr = this->pSinkWriter->WriteSample(streamIndex, pSample); + } + + this->rtStart += this->rtDuration; + + SafeRelease(&pSample); + SafeRelease(&pBuffer); } void MfVideoOutFile::Stop() @@ -54,6 +246,7 @@ void MfVideoOutFile::SetOutputPxFmt(const char *fmt) void MfVideoOutFile::Run() { + } //******************************************************************************* diff --git a/mfvideooutfile.h b/mfvideooutfile.h index f0b7f35..8eb0e4e 100644 --- a/mfvideooutfile.h +++ b/mfvideooutfile.h @@ -4,6 +4,8 @@ #include #include +#include +#include #include "base.h" class MfVideoOutFile : public Base_Video_Out @@ -21,6 +23,12 @@ class MfVideoOutFile : public Base_Video_Out void Run(); +protected: + IMFSinkWriter *pSinkWriter; + DWORD streamIndex; + LONGLONG rtStart; + UINT64 rtDuration; + }; void *MfVideoOut_File_Worker_thread(void *arg); diff --git a/setup.py b/setup.py index 7362782..e3848f4 100755 --- a/setup.py +++ b/setup.py @@ -13,7 +13,7 @@ from distutils.core import Extension, setup import os -debug = 0 +debug = 1 if os.name == "nt": if debug: @@ -23,8 +23,8 @@ c_args=[] l_args=["/MANIFEST"] - videolive = Extension("videolive", ["pixfmt.cpp", "libvideolive.cpp", "videoout.cpp", "videoin.cpp", "mfvideoin.cpp", "namedpipeout.cpp", - "videooutfile.cpp", "mfvideooutfile.cpp"], + videolive = Extension("videolive", ["mfvideooutfile.cpp", "pixfmt.cpp", "libvideolive.cpp", "videoout.cpp", "videoin.cpp", "mfvideoin.cpp", "namedpipeout.cpp", + "videooutfile.cpp"], define_macros=[('_'+os.name.upper(), None)], library_dirs=['C:\Dev\Lib\libjpeg-turbo-win\lib', "C:\Dev\Lib\pthreads\pthreads.2"], include_dirs=['C:\Dev\Lib\libjpeg-turbo-win\include', "C:\Dev\Lib\pthreads\pthreads.2"], From 334df992a1c95e3531d61efb574e813c623231fe Mon Sep 17 00:00:00 2001 From: TimSC Date: Mon, 16 Dec 2013 18:41:01 +0000 Subject: [PATCH 179/256] Move video open and close to separate methods --- mfvideooutfile.cpp | 47 +++++++++++++++++++++++++++------------------- mfvideooutfile.h | 4 ++++ 2 files changed, 32 insertions(+), 19 deletions(-) diff --git a/mfvideooutfile.cpp b/mfvideooutfile.cpp index d91045b..3e4bc1e 100644 --- a/mfvideooutfile.cpp +++ b/mfvideooutfile.cpp @@ -1,6 +1,7 @@ #include "mfvideooutfile.h" #include +#include #include #include using namespace std; @@ -23,7 +24,7 @@ const GUID VIDEO_INPUT_FORMAT = MFVideoFormat_RGB24; const UINT32 VIDEO_PELS = VIDEO_WIDTH * VIDEO_HEIGHT; const UINT32 VIDEO_FRAME_COUNT = 20 * VIDEO_FPS; -MfVideoOutFile::MfVideoOutFile(const char *devName) : Base_Video_Out() +MfVideoOutFile::MfVideoOutFile(const char *fiName) : Base_Video_Out() { HRESULT hr = MFStartup(MF_VERSION); if(!SUCCEEDED(hr)) @@ -34,19 +35,37 @@ MfVideoOutFile::MfVideoOutFile(const char *devName) : Base_Video_Out() throw std::runtime_error("CoInitializeEx failed"); this->pSinkWriter = NULL; - IMFMediaType *pMediaTypeOut = NULL; - IMFMediaType *pMediaTypeIn = NULL; this->streamIndex = 0; + this->rtStart = 0; +} + +MfVideoOutFile::~MfVideoOutFile() +{ + this->CloseFile(); + + MFShutdown(); + + CoUninitialize(); +} +void MfVideoOutFile::OpenFile() +{ + + if(this->pSinkWriter != NULL) + { + throw std::runtime_error("Video output file already open"); + } this->rtStart = 0; + IMFMediaType *pMediaTypeOut = NULL; + IMFMediaType *pMediaTypeIn = NULL; MFFrameRateToAverageTimePerFrame(VIDEO_FPS, 1, &this->rtDuration); - hr = MFCreateSinkWriterFromURL(L"output.wmv", NULL, NULL, &pSinkWriter); + HRESULT hr = MFCreateSinkWriterFromURL(L"output.wmv", NULL, NULL, 
&pSinkWriter); // Set the output media type. if (SUCCEEDED(hr)) { - hr = MFCreateMediaType(&pMediaTypeOut); + hr = MFCreateMediaType(&pMediaTypeOut); } if (SUCCEEDED(hr)) { @@ -124,31 +143,18 @@ MfVideoOutFile::MfVideoOutFile(const char *devName) : Base_Video_Out() hr = pSinkWriter->BeginWriting(); } - // Return the pointer to the caller. - if (SUCCEEDED(hr)) - { - /**ppWriter = pSinkWriter; - (*ppWriter)->AddRef(); - *pStreamIndex = streamIndex;*/ - } - - //SafeRelease(&pSinkWriter); SafeRelease(&pMediaTypeOut); SafeRelease(&pMediaTypeIn); return; } -MfVideoOutFile::~MfVideoOutFile() +void MfVideoOutFile::CloseFile() { if(this->pSinkWriter != NULL) { HRESULT hr = this->pSinkWriter->Finalize(); } SafeRelease(&pSinkWriter); - - MFShutdown(); - - CoUninitialize(); } void MfVideoOutFile::SendFrame(const char *imgIn, unsigned imgLen, const char *pxFmt, int width, int height) @@ -159,6 +165,9 @@ void MfVideoOutFile::SendFrame(const char *imgIn, unsigned imgLen, const char *p const LONG cbWidth = 3 * VIDEO_WIDTH; const DWORD cbBuffer = cbWidth * VIDEO_HEIGHT; + if(this->pSinkWriter == NULL) + this->OpenFile(); + BYTE *pData = NULL; // Create a new memory buffer. diff --git a/mfvideooutfile.h b/mfvideooutfile.h index 8eb0e4e..b298855 100644 --- a/mfvideooutfile.h +++ b/mfvideooutfile.h @@ -14,6 +14,9 @@ class MfVideoOutFile : public Base_Video_Out MfVideoOutFile(const char *devName); virtual ~MfVideoOutFile(); + void OpenFile(); + void CloseFile(); + void SendFrame(const char *imgIn, unsigned imgLen, const char *pxFmt, int width, int height); void Stop(); int WaitForStop(); @@ -23,6 +26,7 @@ class MfVideoOutFile : public Base_Video_Out void Run(); + protected: IMFSinkWriter *pSinkWriter; DWORD streamIndex; From 00b8f741424dc97e47157aef4b4021f47071189c Mon Sep 17 00:00:00 2001 From: TimSC Date: Mon, 16 Dec 2013 18:50:52 +0000 Subject: [PATCH 180/256] Randomize every frame in video out tes --- mfvideooutfile.cpp | 4 ++-- videooutfile.py | 16 ++++++++-------- 2 files changed, 10 insertions(+), 10 deletions(-) diff --git a/mfvideooutfile.cpp b/mfvideooutfile.cpp index 3e4bc1e..f948678 100644 --- a/mfvideooutfile.cpp +++ b/mfvideooutfile.cpp @@ -213,11 +213,11 @@ void MfVideoOutFile::SendFrame(const char *imgIn, unsigned imgLen, const char *p // Set the time stamp and the duration. if (SUCCEEDED(hr)) { - hr = pSample->SetSampleTime(rtStart); + hr = pSample->SetSampleTime(this->rtStart); } if (SUCCEEDED(hr)) { - hr = pSample->SetSampleDuration(rtDuration); + hr = pSample->SetSampleDuration(this->rtDuration); } // Send the sample to the Sink Writer. 
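//A minimal, self-contained sketch (illustration only) of the sample timing used
//by MfVideoOutFile: sink writer timestamps are in 100-nanosecond units, so at
//VIDEO_FPS = 25 the per-frame duration that MFFrameRateToAverageTimePerFrame(25,
//1, ...) should yield is 10,000,000 / 25 = 400,000 units, and rtStart accumulates
//that duration once per frame.

#include <cstdint>
#include <iostream>

int main()
{
	const uint64_t ticksPerSecond = 10000000ULL; //100 ns units per second
	const uint64_t fps = 25;                     //matches VIDEO_FPS above
	uint64_t rtDuration = ticksPerSecond / fps;  //400000
	uint64_t rtStart = 0;

	for(int frame = 0; frame < 5; frame++)
	{
		std::cout << "frame " << frame << " starts at " << rtStart
			<< " (" << rtStart / 10000 << " ms)" << std::endl;
		rtStart += rtDuration; //same accumulation as MfVideoOutFile::SendFrame
	}
	return 0;
}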
diff --git a/videooutfile.py b/videooutfile.py index b565aaa..9c52bca 100644 --- a/videooutfile.py +++ b/videooutfile.py @@ -11,14 +11,14 @@ imgLen = 640 * 480 * 3 img = np.zeros(shape=(imgLen,), dtype=np.uint8) - for i in range(imgLen): - if (i % 500) > 250: - img[i] = np.random.randint(0, 255) - else: - img[i] = 128 - - for i in range(100): - print "Frame", i + for frNum in range(1000): + for i in range(imgLen): + if (i % 500) > 250: + img[i] = np.random.randint(0, 255) + else: + img[i] = 128 + + print "Frame", frNum outManager.send_frame("test.wmv", str(img.tostring()), "RGB24", 640, 480) time.sleep(0.1) From 3cf8b2ea65f9238ba329d1478f0138e8f8d39aa0 Mon Sep 17 00:00:00 2001 From: TimSC Date: Mon, 16 Dec 2013 18:55:24 +0000 Subject: [PATCH 181/256] Change randomisation and make faster --- videooutfile.py | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/videooutfile.py b/videooutfile.py index 9c52bca..d6aa888 100644 --- a/videooutfile.py +++ b/videooutfile.py @@ -12,14 +12,13 @@ img = np.zeros(shape=(imgLen,), dtype=np.uint8) for frNum in range(1000): - for i in range(imgLen): - if (i % 500) > 250: - img[i] = np.random.randint(0, 255) - else: - img[i] = 128 + img = np.random.randint(0, 255, size=(imgLen,)) + #for i in range(imgLen): + # if (i % 500) <= 250: + # img[i] = 128 print "Frame", frNum outManager.send_frame("test.wmv", str(img.tostring()), "RGB24", 640, 480) - time.sleep(0.1) + time.sleep(0.01) From 41a0a03ce1a5e70744c95853e228522083f6f3a1 Mon Sep 17 00:00:00 2001 From: TimSC Date: Mon, 16 Dec 2013 19:48:32 +0000 Subject: [PATCH 182/256] Find appropriate pixel layout for video file writer --- mfvideooutfile.cpp | 31 +++++++++++++++++++++++++++---- videooutfile.py | 15 ++++++++++----- 2 files changed, 37 insertions(+), 9 deletions(-) diff --git a/mfvideooutfile.cpp b/mfvideooutfile.cpp index f948678..1e0631f 100644 --- a/mfvideooutfile.cpp +++ b/mfvideooutfile.cpp @@ -20,9 +20,10 @@ const UINT32 VIDEO_HEIGHT = 480; const UINT32 VIDEO_FPS = 25; const UINT32 VIDEO_BIT_RATE = 800000; const GUID VIDEO_ENCODING_FORMAT = MFVideoFormat_WMV3; -const GUID VIDEO_INPUT_FORMAT = MFVideoFormat_RGB24; +const GUID VIDEO_INPUT_FORMAT = MFVideoFormat_RGB32; const UINT32 VIDEO_PELS = VIDEO_WIDTH * VIDEO_HEIGHT; const UINT32 VIDEO_FRAME_COUNT = 20 * VIDEO_FPS; +const UINT32 BYTES_PER_TUPLE = 4; MfVideoOutFile::MfVideoOutFile(const char *fiName) : Base_Video_Out() { @@ -159,10 +160,11 @@ void MfVideoOutFile::CloseFile() void MfVideoOutFile::SendFrame(const char *imgIn, unsigned imgLen, const char *pxFmt, int width, int height) { + IMFSample *pSample = NULL; IMFMediaBuffer *pBuffer = NULL; - const LONG cbWidth = 3 * VIDEO_WIDTH; + const LONG cbWidth = BYTES_PER_TUPLE * VIDEO_WIDTH; const DWORD cbBuffer = cbWidth * VIDEO_HEIGHT; if(this->pSinkWriter == NULL) @@ -180,14 +182,35 @@ void MfVideoOutFile::SendFrame(const char *imgIn, unsigned imgLen, const char *p } if (SUCCEEDED(hr)) { - hr = MFCopyImage( + //unsigned cpyLen = imgLen; + //if(cpyLen > cbBuffer) cpyLen = cbBuffer; + //memcpy(pData, imgIn, cpyLen); + /*hr = MFCopyImage( pData, // Destination buffer. cbWidth, // Destination stride. (BYTE*)imgIn, // First row in source image. cbWidth, // Source stride. cbWidth, // Image width in bytes. VIDEO_HEIGHT // Image height in pixels. 
- ); + );*/ + for(int y = 0; y < height; y++) + { + int x = 0; + for(x = 0; x < width; x++) + { + + BYTE *dstPx = &pData[x * BYTES_PER_TUPLE + y * cbWidth]; + const BYTE *srcPx = (const BYTE *)&imgIn[x * 3 + y * width * 3]; + dstPx[2] = srcPx[0]; //Red + dstPx[1] = srcPx[1]; //Green + dstPx[0] = srcPx[2]; //Blue + //dstPx[0] = 0xff; + //dstPx[1] = 0xff; + //dstPx[2] = 0xff; + dstPx[3] = 0xff; //Alpha + } + } + } if (pBuffer) { diff --git a/videooutfile.py b/videooutfile.py index d6aa888..0865cc5 100644 --- a/videooutfile.py +++ b/videooutfile.py @@ -1,5 +1,5 @@ -import videolive, time +import videolive, time, random import numpy as np if __name__=="__main__": @@ -9,14 +9,19 @@ outManager.open("test.wmv", "RGB24", 640, 480) imgLen = 640 * 480 * 3 - img = np.zeros(shape=(imgLen,), dtype=np.uint8) + img = np.ones(shape=(imgLen,), dtype=np.uint8) * 0 + for i in range(imgLen): + if (i % 3) == 0: + img[i] = 0xff + if (i % 3) == 1: + img[i] = random.randint(0,255) - for frNum in range(1000): - img = np.random.randint(0, 255, size=(imgLen,)) + for frNum in range(200): + #img = np.random.randint(0, 255, size=(imgLen,)) #for i in range(imgLen): # if (i % 500) <= 250: # img[i] = 128 - + print "Frame", frNum outManager.send_frame("test.wmv", str(img.tostring()), "RGB24", 640, 480) From 013944669dba6270ac6f0fc032e7d3166ac202a8 Mon Sep 17 00:00:00 2001 From: TimSC Date: Mon, 16 Dec 2013 19:50:06 +0000 Subject: [PATCH 183/256] Remove alpha channel from video writer --- mfvideooutfile.cpp | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/mfvideooutfile.cpp b/mfvideooutfile.cpp index 1e0631f..44f8cc2 100644 --- a/mfvideooutfile.cpp +++ b/mfvideooutfile.cpp @@ -20,10 +20,10 @@ const UINT32 VIDEO_HEIGHT = 480; const UINT32 VIDEO_FPS = 25; const UINT32 VIDEO_BIT_RATE = 800000; const GUID VIDEO_ENCODING_FORMAT = MFVideoFormat_WMV3; -const GUID VIDEO_INPUT_FORMAT = MFVideoFormat_RGB32; +const GUID VIDEO_INPUT_FORMAT = MFVideoFormat_RGB24; const UINT32 VIDEO_PELS = VIDEO_WIDTH * VIDEO_HEIGHT; const UINT32 VIDEO_FRAME_COUNT = 20 * VIDEO_FPS; -const UINT32 BYTES_PER_TUPLE = 4; +const UINT32 BYTES_PER_TUPLE = 3; MfVideoOutFile::MfVideoOutFile(const char *fiName) : Base_Video_Out() { @@ -207,7 +207,7 @@ void MfVideoOutFile::SendFrame(const char *imgIn, unsigned imgLen, const char *p //dstPx[0] = 0xff; //dstPx[1] = 0xff; //dstPx[2] = 0xff; - dstPx[3] = 0xff; //Alpha + //dstPx[3] = 0xff; //Alpha } } From 0c1166d2b24d03e3ac9060aa4a1e6fee88680aa8 Mon Sep 17 00:00:00 2001 From: TimSC Date: Mon, 16 Dec 2013 19:50:24 +0000 Subject: [PATCH 184/256] Remove alpha channel from video writer --- mfvideooutfile.cpp | 19 +------------------ 1 file changed, 1 insertion(+), 18 deletions(-) diff --git a/mfvideooutfile.cpp b/mfvideooutfile.cpp index 44f8cc2..2a10955 100644 --- a/mfvideooutfile.cpp +++ b/mfvideooutfile.cpp @@ -182,32 +182,15 @@ void MfVideoOutFile::SendFrame(const char *imgIn, unsigned imgLen, const char *p } if (SUCCEEDED(hr)) { - //unsigned cpyLen = imgLen; - //if(cpyLen > cbBuffer) cpyLen = cbBuffer; - //memcpy(pData, imgIn, cpyLen); - /*hr = MFCopyImage( - pData, // Destination buffer. - cbWidth, // Destination stride. - (BYTE*)imgIn, // First row in source image. - cbWidth, // Source stride. - cbWidth, // Image width in bytes. - VIDEO_HEIGHT // Image height in pixels. 
- );*/ for(int y = 0; y < height; y++) { - int x = 0; - for(x = 0; x < width; x++) + for(int x = 0; x < width; x++) { - BYTE *dstPx = &pData[x * BYTES_PER_TUPLE + y * cbWidth]; const BYTE *srcPx = (const BYTE *)&imgIn[x * 3 + y * width * 3]; dstPx[2] = srcPx[0]; //Red dstPx[1] = srcPx[1]; //Green dstPx[0] = srcPx[2]; //Blue - //dstPx[0] = 0xff; - //dstPx[1] = 0xff; - //dstPx[2] = 0xff; - //dstPx[3] = 0xff; //Alpha } } From d05e86c20e0eba46584745e79d06474348075ac2 Mon Sep 17 00:00:00 2001 From: TimSC Date: Mon, 16 Dec 2013 21:08:34 +0000 Subject: [PATCH 185/256] Use common pix format code for video writer --- mfvideooutfile.cpp | 28 +++++++++----- mfvideooutfile.h | 2 +- pixfmt.cpp | 96 +++++++++++++++++++++++++++++++++++----------- pixfmt.h | 10 +++++ 4 files changed, 104 insertions(+), 32 deletions(-) diff --git a/mfvideooutfile.cpp b/mfvideooutfile.cpp index 2a10955..9f88b2f 100644 --- a/mfvideooutfile.cpp +++ b/mfvideooutfile.cpp @@ -1,5 +1,6 @@ #include "mfvideooutfile.h" +#include "pixfmt.h" #include #include #include @@ -38,6 +39,7 @@ MfVideoOutFile::MfVideoOutFile(const char *fiName) : Base_Video_Out() this->pSinkWriter = NULL; this->streamIndex = 0; this->rtStart = 0; + this->pxFmt = "BGR24"; } MfVideoOutFile::~MfVideoOutFile() @@ -182,18 +184,26 @@ void MfVideoOutFile::SendFrame(const char *imgIn, unsigned imgLen, const char *p } if (SUCCEEDED(hr)) { - for(int y = 0; y < height; y++) + if(strcmp(this->pxFmt.c_str(), pxFmt)!=0) { - for(int x = 0; x < width; x++) - { - BYTE *dstPx = &pData[x * BYTES_PER_TUPLE + y * cbWidth]; - const BYTE *srcPx = (const BYTE *)&imgIn[x * 3 + y * width * 3]; - dstPx[2] = srcPx[0]; //Red - dstPx[1] = srcPx[1]; //Green - dstPx[0] = srcPx[2]; //Blue + unsigned int outBuffLen = cbBuffer; + DecodeAndResizeFrame((const unsigned char *)imgIn, imgLen, pxFmt, + width, height, + this->pxFmt.c_str(), + (unsigned char **)&pData, + &outBuffLen, + VIDEO_WIDTH, VIDEO_HEIGHT); + + //for(int i=0;i<10;i++) + // std::cout << (int)((pData)[i]) << std::endl; + } + else + { + DWORD cpyLen = imgLen; + if(cbBuffer < cpyLen) cpyLen = cbBuffer; + memcpy(pData, imgIn, cpyLen); } - } if (pBuffer) { diff --git a/mfvideooutfile.h b/mfvideooutfile.h index b298855..ef91c0e 100644 --- a/mfvideooutfile.h +++ b/mfvideooutfile.h @@ -32,7 +32,7 @@ class MfVideoOutFile : public Base_Video_Out DWORD streamIndex; LONGLONG rtStart; UINT64 rtDuration; - + std::string pxFmt; }; void *MfVideoOut_File_Worker_thread(void *arg); diff --git a/pixfmt.cpp b/pixfmt.cpp index 82b5d41..8bb27e3 100644 --- a/pixfmt.cpp +++ b/pixfmt.cpp @@ -5,6 +5,7 @@ #include #include #include +#include #include "pixfmt.h" // ********************************************************************* @@ -260,8 +261,14 @@ int ReadJpegFile(unsigned char * inbuffer, /* Step 3: read file parameters with jpeg_read_header() */ jpeg_read_header(&cinfo, TRUE); - *outBufferSize = cinfo.image_width * cinfo.image_height * cinfo.num_components; - *outBuffer = new unsigned char[*outBufferSize]; + unsigned int outBuffLen = cinfo.image_width * cinfo.image_height * cinfo.num_components; + if(*outBufferSize != 0 && *outBufferSize != outBuffLen) + throw std::runtime_error("Output buffer has incorrect size"); + if(*outBuffer == NULL) + { + *outBuffer = new unsigned char[*outBufferSize]; + } + *outBufferSize = outBuffLen; *widthOut = cinfo.image_width; *heightOut = cinfo.image_height; *channelsOut = cinfo.num_components; @@ -322,9 +329,15 @@ void ConvertRGBtoYUYVorSimilar(const unsigned char *im, unsigned sizeimage, { unsigned bytesperline = 
width * 2; unsigned padding = 0; + if(*outImSize != 0 && *outImSize != sizeimage) + throw std::runtime_error("Output buffer has incorrect size"); + unsigned char *outBuff = *outIm; + if(*outIm == NULL) + { + outBuff = new unsigned char [*outImSize]; + *outIm = outBuff; + } *outImSize = sizeimage+padding; - unsigned char *outBuff = new unsigned char [*outImSize]; - *outIm = outBuff; unsigned char *im2 = (unsigned char *)im; int uOffset = 0; @@ -363,8 +376,8 @@ void ConvertRGBtoYUYVorSimilar(const unsigned char *im, unsigned sizeimage, for(unsigned x=0;x< width;x+=2) { unsigned rgbOffset = width * y * 3 + x * 3; - outBuff[cursor+yOffset1] = im[rgbOffset] * 0.299 + im[rgbOffset+1] * 0.587 + im[rgbOffset+2] * 0.114; - outBuff[cursor+yOffset2] = im[rgbOffset+3] * 0.299 + im[rgbOffset+4] * 0.587 + im[rgbOffset+5] * 0.114; + outBuff[cursor+yOffset1] = (unsigned char)(im[rgbOffset] * 0.299 + im[rgbOffset+1] * 0.587 + im[rgbOffset+2] * 0.114 + 0.5); + outBuff[cursor+yOffset2] = (unsigned char)(im[rgbOffset+3] * 0.299 + im[rgbOffset+4] * 0.587 + im[rgbOffset+5] * 0.114 + 0.5); cursor += 4; } @@ -373,10 +386,10 @@ void ConvertRGBtoYUYVorSimilar(const unsigned char *im, unsigned sizeimage, for(unsigned x=0;x< width;x+=2) { unsigned rgbOffset = width * y * 3 + x * 3; - float Pb1 = im2[rgbOffset+0] * -0.168736 + im2[rgbOffset+1] * -0.331264 + im2[rgbOffset+2] * 0.5; - float Pb2 = im2[rgbOffset+3] * -0.168736 + im2[rgbOffset+4] * -0.331264 + im2[rgbOffset+5] * 0.5; + double Pb1 = im2[rgbOffset+0] * -0.168736 + im2[rgbOffset+1] * -0.331264 + im2[rgbOffset+2] * 0.5; + double Pb2 = im2[rgbOffset+3] * -0.168736 + im2[rgbOffset+4] * -0.331264 + im2[rgbOffset+5] * 0.5; - outBuff[cursor+uOffset] = 0.5 * (Pb1 + Pb2) + 128; + outBuff[cursor+uOffset] = (unsigned char)(0.5 * (Pb1 + Pb2) + 128.5); cursor += 4; } @@ -385,10 +398,10 @@ void ConvertRGBtoYUYVorSimilar(const unsigned char *im, unsigned sizeimage, for(unsigned x=0;x< width;x+=2) { unsigned rgbOffset = width * y * 3 + x * 3; - float Pr1 = im2[rgbOffset+0] * 0.5 + im2[rgbOffset+1] * -0.418688 + im2[rgbOffset+2] * -0.081312; - float Pr2 = im2[rgbOffset+3] * 0.5 + im2[rgbOffset+4] * -0.418688 + im2[rgbOffset+5] * -0.081312; + double Pr1 = im2[rgbOffset+0] * 0.5 + im2[rgbOffset+1] * -0.418688 + im2[rgbOffset+2] * -0.081312; + double Pr2 = im2[rgbOffset+3] * 0.5 + im2[rgbOffset+4] * -0.418688 + im2[rgbOffset+5] * -0.081312; - outBuff[cursor+vOffset] = 0.5 * (Pr1 + Pr2) + 128; + outBuff[cursor+vOffset] = (unsigned char)(0.5 * (Pr1 + Pr2) + 128.5); cursor += 4; } } @@ -402,15 +415,18 @@ int DecodeFrame(const unsigned char *data, unsigned dataLen, unsigned char **buffOut, unsigned *buffOutLen) { - //printf("rx %d %s\n", dataLen, inPxFmt); - *buffOut = NULL; - *buffOutLen = 0; - if(strcmp(inPxFmt, targetPxFmt) == 0) { //Conversion not required, return a shallow copy + if (*buffOutLen != 0 && *buffOutLen != dataLen) + { + throw std::runtime_error("Output buffer has incorrect size"); + } + if(*buffOut == NULL) + { + *buffOut = new unsigned char[dataLen]; + } *buffOutLen = dataLen; - *buffOut = new unsigned char[dataLen]; memcpy(*buffOut, data, dataLen); return 1; } @@ -451,9 +467,16 @@ int DecodeFrame(const unsigned char *data, unsigned dataLen, // Convert buffer from YUYV to RGB. 
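	// Each 4-byte YUYV group (Y0 U Y1 V) carries two pixels and expands to two
	// 3-byte RGB pixels, which is why the output buffer below is sized
	// dataLen * 6 / 4.
	// The 0.299/0.587/0.114 luma weights used in ConvertRGBtoYUYVorSimilar() above
	// are the BT.601 coefficients; as a quick check, a mid-grey pixel (128,128,128)
	// maps to Y = 128 with Cb = Cr = 128 (zero chroma), since the chroma weights in
	// each row sum to zero.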
// For the byte order, see: http://v4l2spec.bytesex.org/spec/r4339.htm // For the color conversion, see: http://v4l2spec.bytesex.org/spec/x2123.htm - *buffOutLen = dataLen * 6 / 4; - char *rgb = new char[*buffOutLen]; - *buffOut = (unsigned char*)rgb; + unsigned int outBuffLen = dataLen * 6 / 4; + if(*buffOutLen != 0 && *buffOutLen != outBuffLen) + throw std::runtime_error("Output buffer has incorrect length"); + *buffOutLen = outBuffLen; + char *rgb = (char*)*buffOut; + if(*buffOut == NULL) + { + rgb = new char[*buffOutLen]; + *buffOut = (unsigned char*)rgb; + } char *rgb_max = rgb + *buffOutLen; const unsigned char *yuyv = data; @@ -498,7 +521,10 @@ int DecodeFrame(const unsigned char *data, unsigned dataLen, //RGB24 -> BGR24 if(strcmp(inPxFmt,"RGB24")==0 && strcmp(targetPxFmt, "BGR24")==0) { - *buffOut = new unsigned char[dataLen]; + if(*buffOutLen != 0 && *buffOutLen != dataLen) + throw std::runtime_error("Output buffer has incorrect size"); + if(*buffOut == NULL) + *buffOut = new unsigned char[dataLen]; *buffOutLen = dataLen; for(unsigned i = 0; i+2 < dataLen; i+=3) { @@ -658,4 +684,30 @@ int CropToFitRgb24Image(const unsigned char *data, unsigned dataLen, } return 1; -} \ No newline at end of file +} + +//******************************************************************* + +int DecodeAndResizeFrame(const unsigned char *data, + unsigned dataLen, + const char *inPxFmt, + int srcWidth, int srcHeight, + const char *targetPxFmt, + unsigned char **buffOut, + unsigned *buffOutLen, + int dstWidth, + int dstHeight) +{ + if(srcWidth != dstWidth || srcHeight != dstHeight) + throw std::runtime_error("Resize not implemented yet"); + + int ret = DecodeFrame(data, dataLen, + inPxFmt, + srcWidth, srcHeight, + targetPxFmt, + buffOut, + buffOutLen); + + return ret; +} + diff --git a/pixfmt.h b/pixfmt.h index ffebee3..33d3bb6 100644 --- a/pixfmt.h +++ b/pixfmt.h @@ -11,6 +11,16 @@ int DecodeFrame(const unsigned char *data, unsigned dataLen, unsigned char **buffOut, unsigned *buffOutLen); +int DecodeAndResizeFrame(const unsigned char *data, + unsigned dataLen, + const char *inPxFmt, + int srcWidth, int srcHeight, + const char *targetPxFmt, + unsigned char **buffOut, + unsigned *buffOutLen, + int dstWidth, + int dstHeight); + int InsertHuffmanTableCTypes(const unsigned char* inBufferPtr, unsigned inBufferLen, std::string &outBuffer); int ResizeRgb24ImageNN(const unsigned char *data, unsigned dataLen, From a164c37bfebcd2e4460eb6fe0649f00e841e0458 Mon Sep 17 00:00:00 2001 From: TimSC Date: Mon, 16 Dec 2013 21:45:45 +0000 Subject: [PATCH 186/256] Add resize code but it does not work yet --- mfvideooutfile.cpp | 1 - pixfmt.cpp | 143 ++++++++++++++++++++++++++++++++++++++------- pixfmt.h | 13 ++++- videooutfile.py | 7 ++- 4 files changed, 137 insertions(+), 27 deletions(-) diff --git a/mfvideooutfile.cpp b/mfvideooutfile.cpp index 9f88b2f..885bd50 100644 --- a/mfvideooutfile.cpp +++ b/mfvideooutfile.cpp @@ -196,7 +196,6 @@ void MfVideoOutFile::SendFrame(const char *imgIn, unsigned imgLen, const char *p //for(int i=0;i<10;i++) // std::cout << (int)((pData)[i]) << std::endl; - } else { diff --git a/pixfmt.cpp b/pixfmt.cpp index 8bb27e3..d905548 100644 --- a/pixfmt.cpp +++ b/pixfmt.cpp @@ -620,15 +620,15 @@ int ResizeRgb24ImageNN(const unsigned char *data, unsigned dataLen, int widthIn, int heightIn, unsigned char *buffOut, unsigned buffOutLen, - int widthOut, int heightOut, int invertVertical) + int widthOut, int heightOut, int invertVertical, int tupleLen) { //Simple crop of image to target buffer for(int x = 
0; x < widthOut; x++) { for(int y = 0; y < heightOut; y++) { - unsigned outOffset = x*3 + (y*3*widthOut); - if(outOffset + 3 >= buffOutLen) continue; + unsigned outOffset = x*tupleLen + (y*tupleLen*widthOut); + if(outOffset + tupleLen >= buffOutLen) continue; unsigned char *outPx = &buffOut[outOffset]; //Scale position @@ -641,13 +641,12 @@ int ResizeRgb24ImageNN(const unsigned char *data, unsigned dataLen, int row = inyi; if(invertVertical) row = heightIn - inyi - 1; - unsigned inOffset = inxi*3 + (row*3*widthIn); - if(inOffset + 3 >= dataLen) continue; + unsigned inOffset = inxi*tupleLen + (row*tupleLen*widthIn); + if(inOffset + tupleLen >= dataLen) continue; const unsigned char *inPx = &data[inOffset]; - outPx[0] = inPx[0]; - outPx[1] = inPx[1]; - outPx[2] = inPx[2]; + for(int c = 0; c < tupleLen; c++) + outPx[c] = inPx[c]; } } @@ -659,28 +658,26 @@ int CropToFitRgb24Image(const unsigned char *data, unsigned dataLen, int widthIn, int heightIn, unsigned char *buffOut, unsigned buffOutLen, - int widthOut, int heightOut, int invertVertical) + int widthOut, int heightOut, int invertVertical, int tupleLen = 3) { //Simple crop of image to target buffer for(int x = 0; x < widthOut; x++) { for(int y = 0; y < heightOut; y++) { - unsigned outOffset = x*3 + (y*3*widthOut); - if(outOffset + 3 >= buffOutLen) continue; + unsigned outOffset = x*tupleLen + (y*tupleLen*widthOut); + if(outOffset + tupleLen >= buffOutLen) continue; unsigned char *outPx = &buffOut[outOffset]; int row = y; if(invertVertical) row = heightIn - y - 1; - unsigned inOffset = x*3 + (row*3*widthIn); - if(inOffset + 3 >= dataLen) continue; + unsigned inOffset = x*tupleLen + (row*tupleLen*widthIn); + if(inOffset + tupleLen >= dataLen) continue; const unsigned char *inPx = &data[inOffset]; - outPx[0] = inPx[0]; - outPx[1] = inPx[1]; - outPx[2] = inPx[2]; + for(int c = 0; c < tupleLen; c++) + outPx[c] = inPx[c]; } - } return 1; @@ -698,16 +695,118 @@ int DecodeAndResizeFrame(const unsigned char *data, int dstWidth, int dstHeight) { - if(srcWidth != dstWidth || srcHeight != dstHeight) - throw std::runtime_error("Resize not implemented yet"); + if(srcWidth==dstWidth && srcHeight==dstHeight) + { + //Resize is not required + int ret = DecodeFrame(data, dataLen, + inPxFmt, + srcWidth, srcHeight, + targetPxFmt, + buffOut, + buffOutLen); + return ret; + } + + const unsigned char *currentImg = data; + unsigned currentLen = dataLen; + std::string currentPxFmt = inPxFmt; + int currentWidth = srcWidth; + int currentHeight = srcHeight; + + unsigned char *tmpBuff = NULL; + unsigned tmpBuffLen = 0; + int resizeRet = ResizeFrame(currentImg, + currentLen, + currentPxFmt.c_str(), + currentWidth, currentHeight, + &tmpBuff, + &tmpBuffLen, + dstWidth, + dstHeight); + + if(resizeRet > 0) + { + //Resize succeeded + currentImg = tmpBuff; + currentLen = tmpBuffLen; + currentWidth = dstWidth; + currentHeight = dstHeight; + + int decodeRet = DecodeFrame(currentImg, currentLen, + currentPxFmt.c_str(), + currentWidth, currentHeight, + targetPxFmt, + buffOut, + buffOutLen); + + //Free intermediate buff + if(tmpBuff != NULL) + { + delete [] tmpBuff; + tmpBuff = NULL; + } + + return decodeRet; + } - int ret = DecodeFrame(data, dataLen, + //Attempt to convert pixel format first, do resize later + tmpBuff = NULL; + tmpBuffLen = 0; + int decodeRet = DecodeFrame(data, dataLen, inPxFmt, srcWidth, srcHeight, targetPxFmt, + &tmpBuff, + &tmpBuffLen); + + if(decodeRet <= 0) + return 0; //Conversion failed + + //Now resize + resizeRet = ResizeFrame(tmpBuff, + tmpBuffLen, + 
targetPxFmt, + srcWidth, srcHeight, buffOut, - buffOutLen); + buffOutLen, + dstWidth, + dstHeight); - return ret; + //Free intermediate buff + if(tmpBuff != NULL) + { + delete [] tmpBuff; + tmpBuff = NULL; + } + + return resizeRet; } +int ResizeFrame(const unsigned char *data, + unsigned dataLen, + const char *pxFmt, + int srcWidth, int srcHeight, + unsigned char **buffOut, + unsigned *buffOutLen, + int dstWidth, + int dstHeight) +{ + if(strcmp(pxFmt,"RGB24")!=0 && strcmp(pxFmt,"BGR24")!=0) + { + //Allocate new buffer if needed + int dstBuffSize = 3 * dstWidth * dstHeight; + if(*buffOutLen != 0 && *buffOutLen != dstBuffSize) + throw std::runtime_error("Output buffer has incorrect size"); + *buffOutLen = dstBuffSize; + if(*buffOut == NULL) + *buffOut = new unsigned char [*buffOutLen]; + + return ResizeRgb24ImageNN(data, dataLen, + srcWidth, srcHeight, + *buffOut, + *buffOutLen, + dstWidth, dstHeight, 0, 3); + } + //Not supported + return 0; +} diff --git a/pixfmt.h b/pixfmt.h index 33d3bb6..fad783c 100644 --- a/pixfmt.h +++ b/pixfmt.h @@ -21,13 +21,22 @@ int DecodeAndResizeFrame(const unsigned char *data, int dstWidth, int dstHeight); -int InsertHuffmanTableCTypes(const unsigned char* inBufferPtr, unsigned inBufferLen, std::string &outBuffer); +int ResizeFrame(const unsigned char *data, + unsigned dataLen, + const char *pxFmt, + int srcWidth, int srcHeight, + unsigned char **buffOut, + unsigned *buffOutLen, + int dstWidth, + int dstHeight); int ResizeRgb24ImageNN(const unsigned char *data, unsigned dataLen, int widthIn, int heightIn, unsigned char *buffOut, unsigned buffOutLen, - int widthOut, int heightOut, int invertVertical); + int widthOut, int heightOut, int invertVertical = 0, int tupleLen = 3); + +int InsertHuffmanTableCTypes(const unsigned char* inBufferPtr, unsigned inBufferLen, std::string &outBuffer); #endif //_PIXFMT_H_ diff --git a/videooutfile.py b/videooutfile.py index 0865cc5..cbfd295 100644 --- a/videooutfile.py +++ b/videooutfile.py @@ -8,7 +8,10 @@ outManager.open("test.wmv", "RGB24", 640, 480) - imgLen = 640 * 480 * 3 + w = 800 + h = 600 + + imgLen = w * h * 3 img = np.ones(shape=(imgLen,), dtype=np.uint8) * 0 for i in range(imgLen): if (i % 3) == 0: @@ -23,7 +26,7 @@ # img[i] = 128 print "Frame", frNum - outManager.send_frame("test.wmv", str(img.tostring()), "RGB24", 640, 480) + outManager.send_frame("test.wmv", str(img.tostring()), "RGB24", w, h) time.sleep(0.01) From 4af5fe20ddccfa2d1e29be952fbb29dd06f3f282 Mon Sep 17 00:00:00 2001 From: TimSC Date: Mon, 16 Dec 2013 21:53:31 +0000 Subject: [PATCH 187/256] Fixed resize code --- mfvideooutfile.cpp | 5 +++-- pixfmt.cpp | 2 +- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/mfvideooutfile.cpp b/mfvideooutfile.cpp index 885bd50..c9ac74a 100644 --- a/mfvideooutfile.cpp +++ b/mfvideooutfile.cpp @@ -186,6 +186,8 @@ void MfVideoOutFile::SendFrame(const char *imgIn, unsigned imgLen, const char *p { if(strcmp(this->pxFmt.c_str(), pxFmt)!=0) { + //std::cout << (long) pData << std::endl; + unsigned int outBuffLen = cbBuffer; DecodeAndResizeFrame((const unsigned char *)imgIn, imgLen, pxFmt, width, height, @@ -194,8 +196,7 @@ void MfVideoOutFile::SendFrame(const char *imgIn, unsigned imgLen, const char *p &outBuffLen, VIDEO_WIDTH, VIDEO_HEIGHT); - //for(int i=0;i<10;i++) - // std::cout << (int)((pData)[i]) << std::endl; + //std::cout << (long) pData << std::endl; } else { diff --git a/pixfmt.cpp b/pixfmt.cpp index d905548..5751e9f 100644 --- a/pixfmt.cpp +++ b/pixfmt.cpp @@ -791,7 +791,7 @@ int ResizeFrame(const 
unsigned char *data, int dstWidth, int dstHeight) { - if(strcmp(pxFmt,"RGB24")!=0 && strcmp(pxFmt,"BGR24")!=0) + if(strcmp(pxFmt,"RGB24")==0 || strcmp(pxFmt,"BGR24")==0) { //Allocate new buffer if needed int dstBuffSize = 3 * dstWidth * dstHeight; From 1092f92b2e1cd34c2e47dc2860225f1608cf4a49 Mon Sep 17 00:00:00 2001 From: TimSC Date: Tue, 17 Dec 2013 00:38:30 +0000 Subject: [PATCH 188/256] Make static video out variables class members --- mfvideooutfile.cpp | 34 +++++++++++++++++++++------------- mfvideooutfile.h | 8 +++++--- 2 files changed, 26 insertions(+), 16 deletions(-) diff --git a/mfvideooutfile.cpp b/mfvideooutfile.cpp index c9ac74a..5427c41 100644 --- a/mfvideooutfile.cpp +++ b/mfvideooutfile.cpp @@ -16,14 +16,9 @@ template void SafeRelease(T **ppT) } } -const UINT32 VIDEO_WIDTH = 640; -const UINT32 VIDEO_HEIGHT = 480; const UINT32 VIDEO_FPS = 25; -const UINT32 VIDEO_BIT_RATE = 800000; const GUID VIDEO_ENCODING_FORMAT = MFVideoFormat_WMV3; const GUID VIDEO_INPUT_FORMAT = MFVideoFormat_RGB24; -const UINT32 VIDEO_PELS = VIDEO_WIDTH * VIDEO_HEIGHT; -const UINT32 VIDEO_FRAME_COUNT = 20 * VIDEO_FPS; const UINT32 BYTES_PER_TUPLE = 3; MfVideoOutFile::MfVideoOutFile(const char *fiName) : Base_Video_Out() @@ -40,6 +35,10 @@ MfVideoOutFile::MfVideoOutFile(const char *fiName) : Base_Video_Out() this->streamIndex = 0; this->rtStart = 0; this->pxFmt = "BGR24"; + + this->outputWidth = 640; + this->outputHeight = 480; + this->bitRate = 800000; } MfVideoOutFile::~MfVideoOutFile() @@ -80,7 +79,7 @@ void MfVideoOutFile::OpenFile() } if (SUCCEEDED(hr)) { - hr = pMediaTypeOut->SetUINT32(MF_MT_AVG_BITRATE, VIDEO_BIT_RATE); + hr = pMediaTypeOut->SetUINT32(MF_MT_AVG_BITRATE, this->bitRate); } if (SUCCEEDED(hr)) @@ -89,7 +88,7 @@ void MfVideoOutFile::OpenFile() } if (SUCCEEDED(hr)) { - hr = MFSetAttributeSize(pMediaTypeOut, MF_MT_FRAME_SIZE, VIDEO_WIDTH, VIDEO_HEIGHT); + hr = MFSetAttributeSize(pMediaTypeOut, MF_MT_FRAME_SIZE, this->outputWidth, this->outputHeight); } if (SUCCEEDED(hr)) { @@ -125,7 +124,7 @@ void MfVideoOutFile::OpenFile() } if (SUCCEEDED(hr)) { - hr = MFSetAttributeSize(pMediaTypeIn, MF_MT_FRAME_SIZE, VIDEO_WIDTH, VIDEO_HEIGHT); + hr = MFSetAttributeSize(pMediaTypeIn, MF_MT_FRAME_SIZE, this->outputWidth, this->outputHeight); } if (SUCCEEDED(hr)) { @@ -166,8 +165,8 @@ void MfVideoOutFile::SendFrame(const char *imgIn, unsigned imgLen, const char *p IMFSample *pSample = NULL; IMFMediaBuffer *pBuffer = NULL; - const LONG cbWidth = BYTES_PER_TUPLE * VIDEO_WIDTH; - const DWORD cbBuffer = cbWidth * VIDEO_HEIGHT; + const LONG cbWidth = BYTES_PER_TUPLE * this->outputWidth; + const DWORD cbBuffer = cbWidth * this->outputHeight; if(this->pSinkWriter == NULL) this->OpenFile(); @@ -194,7 +193,7 @@ void MfVideoOutFile::SendFrame(const char *imgIn, unsigned imgLen, const char *p this->pxFmt.c_str(), (unsigned char **)&pData, &outBuffLen, - VIDEO_WIDTH, VIDEO_HEIGHT); + this->outputWidth, this->outputHeight); //std::cout << (long) pData << std::endl; } @@ -260,12 +259,21 @@ int MfVideoOutFile::WaitForStop() void MfVideoOutFile::SetOutputSize(int width, int height) { - + if(this->pSinkWriter != NULL) + { + throw std::runtime_error("Set video size before opening video file"); + } + this->outputWidth = width; + this->outputHeight = height; } void MfVideoOutFile::SetOutputPxFmt(const char *fmt) { - + if(this->pSinkWriter != NULL) + { + throw std::runtime_error("Set video format before opening video file"); + } + this->pxFmt = fmt; } void MfVideoOutFile::Run() diff --git a/mfvideooutfile.h 
b/mfvideooutfile.h index ef91c0e..b3944e7 100644 --- a/mfvideooutfile.h +++ b/mfvideooutfile.h @@ -26,13 +26,15 @@ class MfVideoOutFile : public Base_Video_Out void Run(); - protected: - IMFSinkWriter *pSinkWriter; - DWORD streamIndex; + IMFSinkWriter *pSinkWriter; + DWORD streamIndex; LONGLONG rtStart; UINT64 rtDuration; std::string pxFmt; + + int outputWidth, outputHeight; + UINT32 bitRate; }; void *MfVideoOut_File_Worker_thread(void *arg); From 611fbc37b14e0283a6803024b21141f9486cecbb Mon Sep 17 00:00:00 2001 From: TimSC Date: Tue, 17 Dec 2013 01:05:10 +0000 Subject: [PATCH 189/256] Only bgr colour space is supported but code running slowly --- mfvideooutfile.cpp | 12 ++++++++---- mfvideooutfile.h | 1 + videooutfile.py | 2 +- 3 files changed, 10 insertions(+), 5 deletions(-) diff --git a/mfvideooutfile.cpp b/mfvideooutfile.cpp index 5427c41..df6dfb6 100644 --- a/mfvideooutfile.cpp +++ b/mfvideooutfile.cpp @@ -18,7 +18,6 @@ template void SafeRelease(T **ppT) const UINT32 VIDEO_FPS = 25; const GUID VIDEO_ENCODING_FORMAT = MFVideoFormat_WMV3; -const GUID VIDEO_INPUT_FORMAT = MFVideoFormat_RGB24; const UINT32 BYTES_PER_TUPLE = 3; MfVideoOutFile::MfVideoOutFile(const char *fiName) : Base_Video_Out() @@ -62,7 +61,8 @@ void MfVideoOutFile::OpenFile() IMFMediaType *pMediaTypeIn = NULL; MFFrameRateToAverageTimePerFrame(VIDEO_FPS, 1, &this->rtDuration); - HRESULT hr = MFCreateSinkWriterFromURL(L"output.wmv", NULL, NULL, &pSinkWriter); + this->fina = L"output.wmv"; + HRESULT hr = MFCreateSinkWriterFromURL(this->fina.c_str(), NULL, NULL, &pSinkWriter); // Set the output media type. if (SUCCEEDED(hr)) @@ -75,7 +75,8 @@ void MfVideoOutFile::OpenFile() } if (SUCCEEDED(hr)) { - hr = pMediaTypeOut->SetGUID(MF_MT_SUBTYPE, VIDEO_ENCODING_FORMAT); + if(strcmp(this->pxFmt.c_str(), "BGR24")==0) + hr = pMediaTypeOut->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_RGB24); } if (SUCCEEDED(hr)) { @@ -116,7 +117,8 @@ void MfVideoOutFile::OpenFile() if (SUCCEEDED(hr)) { - hr = pMediaTypeIn->SetGUID(MF_MT_SUBTYPE, VIDEO_INPUT_FORMAT); + if(strcmp(this->pxFmt.c_str(), "BGR24")==0) + hr = pMediaTypeIn->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_RGB24); } if (SUCCEEDED(hr)) { @@ -273,6 +275,8 @@ void MfVideoOutFile::SetOutputPxFmt(const char *fmt) { throw std::runtime_error("Set video format before opening video file"); } + if(strcmp(fmt,"BGR24")!=0) + throw std::runtime_error("Only BGR24 is supported"); this->pxFmt = fmt; } diff --git a/mfvideooutfile.h b/mfvideooutfile.h index b3944e7..ac3e46d 100644 --- a/mfvideooutfile.h +++ b/mfvideooutfile.h @@ -32,6 +32,7 @@ class MfVideoOutFile : public Base_Video_Out LONGLONG rtStart; UINT64 rtDuration; std::string pxFmt; + std::wstring fina; int outputWidth, outputHeight; UINT32 bitRate; diff --git a/videooutfile.py b/videooutfile.py index cbfd295..b81b5db 100644 --- a/videooutfile.py +++ b/videooutfile.py @@ -6,7 +6,7 @@ outManager = videolive.Video_out_file_manager() print outManager - outManager.open("test.wmv", "RGB24", 640, 480) + outManager.open("test.wmv", "BGR24", 640, 480) w = 800 h = 600 From e4ebd4c83b0c1366ff35761b02d3d3b721427a6e Mon Sep 17 00:00:00 2001 From: TimSC Date: Tue, 17 Dec 2013 01:22:47 +0000 Subject: [PATCH 190/256] Fix speed but still investigating --- mfvideooutfile.cpp | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/mfvideooutfile.cpp b/mfvideooutfile.cpp index df6dfb6..45f90b5 100644 --- a/mfvideooutfile.cpp +++ b/mfvideooutfile.cpp @@ -18,6 +18,7 @@ template void SafeRelease(T **ppT) const UINT32 VIDEO_FPS = 25; const GUID 
VIDEO_ENCODING_FORMAT = MFVideoFormat_WMV3; +const GUID VIDEO_INPUT_FORMAT = MFVideoFormat_RGB24; const UINT32 BYTES_PER_TUPLE = 3; MfVideoOutFile::MfVideoOutFile(const char *fiName) : Base_Video_Out() @@ -75,8 +76,7 @@ void MfVideoOutFile::OpenFile() } if (SUCCEEDED(hr)) { - if(strcmp(this->pxFmt.c_str(), "BGR24")==0) - hr = pMediaTypeOut->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_RGB24); + hr = pMediaTypeOut->SetGUID(MF_MT_SUBTYPE, VIDEO_ENCODING_FORMAT); } if (SUCCEEDED(hr)) { @@ -117,8 +117,7 @@ void MfVideoOutFile::OpenFile() if (SUCCEEDED(hr)) { - if(strcmp(this->pxFmt.c_str(), "BGR24")==0) - hr = pMediaTypeIn->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_RGB24); + hr = pMediaTypeIn->SetGUID(MF_MT_SUBTYPE, VIDEO_INPUT_FORMAT); } if (SUCCEEDED(hr)) { From f33ad05fcebbc51d3fd7d10d35397509fdbaf07c Mon Sep 17 00:00:00 2001 From: TimSC Date: Tue, 17 Dec 2013 01:40:18 +0000 Subject: [PATCH 191/256] Set filename of video in constructor --- mfvideooutfile.cpp | 19 ++++++++++++++----- videoin.cpp | 2 +- 2 files changed, 15 insertions(+), 6 deletions(-) diff --git a/mfvideooutfile.cpp b/mfvideooutfile.cpp index 45f90b5..e4dc438 100644 --- a/mfvideooutfile.cpp +++ b/mfvideooutfile.cpp @@ -16,9 +16,18 @@ template void SafeRelease(T **ppT) } } +std::wstring CStringToWString(const char *inStr) +{ + wchar_t *tmpDevName = new wchar_t[strlen(inStr)+1]; + size_t returnValue; + + mbstowcs_s(&returnValue, tmpDevName, strlen(inStr)+1, inStr, strlen(inStr)+1); + std::wstring tmpDevName2(tmpDevName); + delete [] tmpDevName; + return tmpDevName2; +} + const UINT32 VIDEO_FPS = 25; -const GUID VIDEO_ENCODING_FORMAT = MFVideoFormat_WMV3; -const GUID VIDEO_INPUT_FORMAT = MFVideoFormat_RGB24; const UINT32 BYTES_PER_TUPLE = 3; MfVideoOutFile::MfVideoOutFile(const char *fiName) : Base_Video_Out() @@ -39,6 +48,7 @@ MfVideoOutFile::MfVideoOutFile(const char *fiName) : Base_Video_Out() this->outputWidth = 640; this->outputHeight = 480; this->bitRate = 800000; + this->fina = CStringToWString(fiName); } MfVideoOutFile::~MfVideoOutFile() @@ -62,7 +72,6 @@ void MfVideoOutFile::OpenFile() IMFMediaType *pMediaTypeIn = NULL; MFFrameRateToAverageTimePerFrame(VIDEO_FPS, 1, &this->rtDuration); - this->fina = L"output.wmv"; HRESULT hr = MFCreateSinkWriterFromURL(this->fina.c_str(), NULL, NULL, &pSinkWriter); // Set the output media type. 
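A rough usage sketch of the filename handling this patch introduces (the helper and function names below are illustrative, not taken from the patch): the caller's narrow string is widened once with mbstowcs_s, kept alive as a std::wstring, and passed to MFCreateSinkWriterFromURL when the file is opened. MFStartup and CoInitializeEx are assumed to have run already, as they do in the MfVideoOutFile constructor.

#include <windows.h>
#include <mfapi.h>
#include <mfidl.h>
#include <mfreadwrite.h>
#include <cstdlib>
#include <cstring>
#include <stdexcept>
#include <string>
#include <vector>

// Widen a narrow (current-locale) filename for Media Foundation URL APIs.
static std::wstring WidenFileName(const char *narrow)
{
	size_t srcLen = strlen(narrow);
	std::vector<wchar_t> buf(srcLen + 1);  // room for the terminating NUL
	size_t converted = 0;
	mbstowcs_s(&converted, buf.data(), buf.size(), narrow, srcLen);
	return std::wstring(buf.data());
}

// Open a sink writer for the given file.
// Link against mfreadwrite.lib, mfplat.lib and mfuuid.lib.
static IMFSinkWriter *OpenSinkWriterSketch(const char *narrowName)
{
	std::wstring wideName = WidenFileName(narrowName);
	IMFSinkWriter *pSinkWriter = NULL;
	HRESULT hr = MFCreateSinkWriterFromURL(wideName.c_str(), NULL, NULL, &pSinkWriter);
	if (FAILED(hr))
		throw std::runtime_error("MFCreateSinkWriterFromURL failed");
	return pSinkWriter;
}

Keeping the widened name as a member, as the patch does with fina, means the conversion happens once in the constructor rather than on every open.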
@@ -76,7 +85,7 @@ void MfVideoOutFile::OpenFile() } if (SUCCEEDED(hr)) { - hr = pMediaTypeOut->SetGUID(MF_MT_SUBTYPE, VIDEO_ENCODING_FORMAT); + hr = pMediaTypeOut->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_WMV3); } if (SUCCEEDED(hr)) { @@ -117,7 +126,7 @@ void MfVideoOutFile::OpenFile() if (SUCCEEDED(hr)) { - hr = pMediaTypeIn->SetGUID(MF_MT_SUBTYPE, VIDEO_INPUT_FORMAT); + hr = pMediaTypeIn->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_RGB24); } if (SUCCEEDED(hr)) { diff --git a/videoin.cpp b/videoin.cpp index 6be3922..1cfaa8a 100644 --- a/videoin.cpp +++ b/videoin.cpp @@ -57,7 +57,7 @@ PyObject *Device_manager_open(Device_manager *self, PyObject *args) #ifdef _NT wchar_t *tmpDevName = new wchar_t[strlen(devarg)+1]; size_t returnValue; - //returnValue = mbstowcs(tmpDevName, devarg, strlen(devarg)+1); + mbstowcs_s(&returnValue, tmpDevName, strlen(devarg)+1, devarg, strlen(devarg)+1); std::wstring tmpDevName2(tmpDevName); delete [] tmpDevName; From 8d8be490796213c8b5979fa129736f42ad1de0ac Mon Sep 17 00:00:00 2001 From: TimSC Date: Tue, 17 Dec 2013 01:46:52 +0000 Subject: [PATCH 192/256] Disable frame rate --- mfvideooutfile.cpp | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/mfvideooutfile.cpp b/mfvideooutfile.cpp index e4dc438..9bb8a8a 100644 --- a/mfvideooutfile.cpp +++ b/mfvideooutfile.cpp @@ -27,7 +27,7 @@ std::wstring CStringToWString(const char *inStr) return tmpDevName2; } -const UINT32 VIDEO_FPS = 25; +//const UINT32 VIDEO_FPS = 25; const UINT32 BYTES_PER_TUPLE = 3; MfVideoOutFile::MfVideoOutFile(const char *fiName) : Base_Video_Out() @@ -100,10 +100,10 @@ void MfVideoOutFile::OpenFile() { hr = MFSetAttributeSize(pMediaTypeOut, MF_MT_FRAME_SIZE, this->outputWidth, this->outputHeight); } - if (SUCCEEDED(hr)) - { - hr = MFSetAttributeRatio(pMediaTypeOut, MF_MT_FRAME_RATE, VIDEO_FPS, 1); - } + //if (SUCCEEDED(hr)) + //{ + // hr = MFSetAttributeRatio(pMediaTypeOut, MF_MT_FRAME_RATE, VIDEO_FPS, 1); + //} if (SUCCEEDED(hr)) { hr = MFSetAttributeRatio(pMediaTypeOut, MF_MT_PIXEL_ASPECT_RATIO, 1, 1); From 226128ff2984585a63efa2a3ff8cfe0fdfc415e2 Mon Sep 17 00:00:00 2001 From: TimSC Date: Tue, 17 Dec 2013 02:00:18 +0000 Subject: [PATCH 193/256] Lena test image works fine --- mfvideooutfile.cpp | 27 +++++++++++++++++++-------- mfvideooutfile.h | 3 ++- videooutfile.py | 23 +++++++++++++---------- 3 files changed, 34 insertions(+), 19 deletions(-) diff --git a/mfvideooutfile.cpp b/mfvideooutfile.cpp index 9bb8a8a..4c8e6dc 100644 --- a/mfvideooutfile.cpp +++ b/mfvideooutfile.cpp @@ -27,7 +27,6 @@ std::wstring CStringToWString(const char *inStr) return tmpDevName2; } -//const UINT32 VIDEO_FPS = 25; const UINT32 BYTES_PER_TUPLE = 3; MfVideoOutFile::MfVideoOutFile(const char *fiName) : Base_Video_Out() @@ -49,6 +48,7 @@ MfVideoOutFile::MfVideoOutFile(const char *fiName) : Base_Video_Out() this->outputHeight = 480; this->bitRate = 800000; this->fina = CStringToWString(fiName); + this->forceFrameRateFps = 25; } MfVideoOutFile::~MfVideoOutFile() @@ -70,7 +70,9 @@ void MfVideoOutFile::OpenFile() this->rtStart = 0; IMFMediaType *pMediaTypeOut = NULL; IMFMediaType *pMediaTypeIn = NULL; - MFFrameRateToAverageTimePerFrame(VIDEO_FPS, 1, &this->rtDuration); + this->rtDuration = 1; + if(this->forceFrameRateFps > 0) + MFFrameRateToAverageTimePerFrame(this->forceFrameRateFps, 1, &this->rtDuration); HRESULT hr = MFCreateSinkWriterFromURL(this->fina.c_str(), NULL, NULL, &pSinkWriter); @@ -100,10 +102,10 @@ void MfVideoOutFile::OpenFile() { hr = MFSetAttributeSize(pMediaTypeOut, MF_MT_FRAME_SIZE, 
this->outputWidth, this->outputHeight); } - //if (SUCCEEDED(hr)) - //{ - // hr = MFSetAttributeRatio(pMediaTypeOut, MF_MT_FRAME_RATE, VIDEO_FPS, 1); - //} + if (SUCCEEDED(hr) && this->forceFrameRateFps > 0) + { + hr = MFSetAttributeRatio(pMediaTypeOut, MF_MT_FRAME_RATE, this->forceFrameRateFps, 1); + } if (SUCCEEDED(hr)) { hr = MFSetAttributeRatio(pMediaTypeOut, MF_MT_PIXEL_ASPECT_RATIO, 1, 1); @@ -136,9 +138,9 @@ void MfVideoOutFile::OpenFile() { hr = MFSetAttributeSize(pMediaTypeIn, MF_MT_FRAME_SIZE, this->outputWidth, this->outputHeight); } - if (SUCCEEDED(hr)) + if (SUCCEEDED(hr) && this->forceFrameRateFps > 0) { - hr = MFSetAttributeRatio(pMediaTypeIn, MF_MT_FRAME_RATE, VIDEO_FPS, 1); + hr = MFSetAttributeRatio(pMediaTypeIn, MF_MT_FRAME_RATE, this->forceFrameRateFps, 1); } if (SUCCEEDED(hr)) { @@ -288,6 +290,15 @@ void MfVideoOutFile::SetOutputPxFmt(const char *fmt) this->pxFmt = fmt; } +void MfVideoOutFile::SetFrameRate(UINT32 frameRateIn) +{ + if(this->pSinkWriter != NULL) + { + throw std::runtime_error("Set video parameters before opening video file"); + } + this->forceFrameRateFps = frameRateIn; +} + void MfVideoOutFile::Run() { diff --git a/mfvideooutfile.h b/mfvideooutfile.h index ac3e46d..cf3476f 100644 --- a/mfvideooutfile.h +++ b/mfvideooutfile.h @@ -23,6 +23,7 @@ class MfVideoOutFile : public Base_Video_Out virtual void SetOutputSize(int width, int height); virtual void SetOutputPxFmt(const char *fmt); + virtual void SetFrameRate(UINT32 frameRateIn); void Run(); @@ -35,7 +36,7 @@ class MfVideoOutFile : public Base_Video_Out std::wstring fina; int outputWidth, outputHeight; - UINT32 bitRate; + UINT32 bitRate, forceFrameRateFps; }; void *MfVideoOut_File_Worker_thread(void *arg); diff --git a/videooutfile.py b/videooutfile.py index b81b5db..6ad02e9 100644 --- a/videooutfile.py +++ b/videooutfile.py @@ -1,23 +1,26 @@ import videolive, time, random import numpy as np +import scipy.misc as misc if __name__=="__main__": outManager = videolive.Video_out_file_manager() print outManager - outManager.open("test.wmv", "BGR24", 640, 480) + lena = misc.imread("Lenna.png") + print lena.shape + w = lena.shape[1] + h = lena.shape[0] - w = 800 - h = 600 + outManager.open("test.wmv", "BGR24", 640, 480) imgLen = w * h * 3 - img = np.ones(shape=(imgLen,), dtype=np.uint8) * 0 - for i in range(imgLen): - if (i % 3) == 0: - img[i] = 0xff - if (i % 3) == 1: - img[i] = random.randint(0,255) + #img = np.ones(shape=(imgLen,), dtype=np.uint8) * 0 + #for i in range(imgLen): + # if (i % 3) == 0: + # img[i] = 0xff + # if (i % 3) == 1: + # img[i] = random.randint(0,255) for frNum in range(200): #img = np.random.randint(0, 255, size=(imgLen,)) @@ -26,7 +29,7 @@ # img[i] = 128 print "Frame", frNum - outManager.send_frame("test.wmv", str(img.tostring()), "RGB24", w, h) + outManager.send_frame("test.wmv", str(lena.tostring()), "RGB24", w, h) time.sleep(0.01) From d925b84379bbec321703c9f6708d20e1fd5f70ea Mon Sep 17 00:00:00 2001 From: TimSC Date: Tue, 17 Dec 2013 02:16:23 +0000 Subject: [PATCH 194/256] Set framerate via python --- base.h | 1 + libvideolive.cpp | 9 ++++++--- videooutfile.cpp | 34 +++++++++++++++++++++++++++++++++- videooutfile.h | 2 +- videooutfile.py | 2 +- 5 files changed, 42 insertions(+), 6 deletions(-) diff --git a/base.h b/base.h index 205f8c5..5c83132 100644 --- a/base.h +++ b/base.h @@ -75,6 +75,7 @@ class Base_Video_Out virtual int WaitForStop() {return 1;}; virtual void SetOutputSize(int width, int height) {}; virtual void SetOutputPxFmt(const char *fmt) {}; + virtual void 
SetFrameRate(UINT32 frameRateIn) {}; void Run() {}; }; diff --git a/libvideolive.cpp b/libvideolive.cpp index 7a17662..24b317f 100644 --- a/libvideolive.cpp +++ b/libvideolive.cpp @@ -128,14 +128,17 @@ static PyTypeObject Video_out_manager_type = { static PyMethodDef Video_out_file_manager_methods[] = { {"open", (PyCFunction)Video_out_file_manager_open, METH_VARARGS, - "open(filename = '\\dev\\video0', pixel_format, width, height)\n\n" + "open(filename = 'out.wmv', pixel_format, width, height)\n\n" "Open video output."}, {"send_frame", (PyCFunction)Video_out_file_manager_Send_frame, METH_VARARGS, - "send_frame(dev = '\\dev\\video0', img, pixel_format, width, height)\n\n" + "send_frame(filename = 'out.wmv', img, pixel_format, width, height)\n\n" "Send frame to video stream output."}, {"close", (PyCFunction)Video_out_file_manager_close, METH_VARARGS, - "close(dev = '\\dev\\video0')\n\n" + "close(filename = 'out.wmv')\n\n" "Close video device. Subsequent calls to other methods will fail."}, + {"set_frame_rate", (PyCFunction)Video_out_file_manager_Set_Frame_Rate, METH_VARARGS, + "set_frame_rate(filename = 'out.wmv', frame_rate)\n\n" + "Set output frame rate. Use frame rate of 0 for real time frame processing."}, {NULL} }; diff --git a/videooutfile.cpp b/videooutfile.cpp index 3059a67..438f6da 100644 --- a/videooutfile.cpp +++ b/videooutfile.cpp @@ -34,7 +34,7 @@ void Video_out_file_manager_dealloc(Video_out_file_manager *self) PyObject *Video_out_file_manager_open(Video_out_file_manager *self, PyObject *args) { - std::cout << "Video_out_manager_open" << std::endl; + std::cout << "Video_out_file_manager_open" << std::endl; //Process arguments const char *devarg = NULL; @@ -144,3 +144,35 @@ PyObject *Video_out_file_manager_close(Video_out_file_manager *self, PyObject *a Py_RETURN_NONE; } +PyObject *Video_out_file_manager_Set_Frame_Rate(Video_out_file_manager *self, PyObject *args) +{ + //Process arguments + const char *devarg = NULL; + int frameRate = 0; + + if(PyObject_Length(args) < 2) + { + PyErr_Format(PyExc_RuntimeError, "Too few arguments."); + Py_RETURN_NONE; + } + + PyObject *pydev = PyTuple_GetItem(args, 0); + devarg = PyString_AsString(pydev); + + PyObject *pyFrameRate = PyTuple_GetItem(args, 1); + frameRate = PyInt_AsLong(pyFrameRate); + + std::map::iterator it = self->threads->find(devarg); + + if(it != self->threads->end()) + { + it->second->SetFrameRate(frameRate); + } + else + { + PyErr_Format(PyExc_RuntimeError, "Device not found."); + Py_RETURN_NONE; + } + + Py_RETURN_NONE; +} diff --git a/videooutfile.h b/videooutfile.h index 2de4919..274f93d 100644 --- a/videooutfile.h +++ b/videooutfile.h @@ -22,7 +22,7 @@ void Video_out_file_manager_dealloc(Video_out_file_manager *self); PyObject *Video_out_file_manager_open(Video_out_file_manager *self, PyObject *args); PyObject *Video_out_file_manager_Send_frame(Video_out_file_manager *self, PyObject *args); PyObject *Video_out_file_manager_close(Video_out_file_manager *self, PyObject *args); - +PyObject *Video_out_file_manager_Set_Frame_Rate(Video_out_file_manager *self, PyObject *args); #endif //VIDEOOUTFILE_H diff --git a/videooutfile.py b/videooutfile.py index 6ad02e9..7896c3b 100644 --- a/videooutfile.py +++ b/videooutfile.py @@ -13,7 +13,7 @@ h = lena.shape[0] outManager.open("test.wmv", "BGR24", 640, 480) - + outManager.set_frame_rate("test.wmv", 5) imgLen = w * h * 3 #img = np.ones(shape=(imgLen,), dtype=np.uint8) * 0 #for i in range(imgLen): From cd49b8c5403670768259205165620b6a70ec99b5 Mon Sep 17 00:00:00 2001 From: TimSC Date: 
Tue, 17 Dec 2013 11:14:27 +0000 Subject: [PATCH 195/256] Add real time clock to video out --- mfvideooutfile.cpp | 133 +++++++++++++++++++++++++++++++++++++++------ mfvideooutfile.h | 5 ++ 2 files changed, 122 insertions(+), 16 deletions(-) diff --git a/mfvideooutfile.cpp b/mfvideooutfile.cpp index 4c8e6dc..2b3de45 100644 --- a/mfvideooutfile.cpp +++ b/mfvideooutfile.cpp @@ -16,6 +16,36 @@ template void SafeRelease(T **ppT) } } +FILETIME GetTimeNow() +{ + SYSTEMTIME systime; + GetSystemTime(&systime); + FILETIME time; + SystemTimeToFileTime(&systime, &time); + return time; +} + +double SubtractTimes(FILETIME first, FILETIME second) +{ + LONGLONG diffInTicks = + reinterpret_cast(&first)->QuadPart - + reinterpret_cast(&second)->QuadPart; + double diffInSec = diffInTicks / (double)1e7; + return diffInSec; +} + +void SetTimeToZero(FILETIME &t) +{ + t.dwLowDateTime = 0; + t.dwHighDateTime = 0; +} + +bool TimeIsZero(FILETIME &t) +{ + if (t.dwLowDateTime != 0) return 0; + return t.dwHighDateTime == 0; +} + std::wstring CStringToWString(const char *inStr) { wchar_t *tmpDevName = new wchar_t[strlen(inStr)+1]; @@ -48,13 +78,13 @@ MfVideoOutFile::MfVideoOutFile(const char *fiName) : Base_Video_Out() this->outputHeight = 480; this->bitRate = 800000; this->fina = CStringToWString(fiName); - this->forceFrameRateFps = 25; + this->forceFrameRateFps = 0; + this->prevFrameDuration = 0; + SetTimeToZero(this->startVideoTime); } MfVideoOutFile::~MfVideoOutFile() { - this->CloseFile(); - MFShutdown(); CoUninitialize(); @@ -164,6 +194,8 @@ void MfVideoOutFile::OpenFile() void MfVideoOutFile::CloseFile() { + this->CopyFromBufferToOutFile(1); + if(this->pSinkWriter != NULL) { HRESULT hr = this->pSinkWriter->Finalize(); @@ -173,16 +205,69 @@ void MfVideoOutFile::CloseFile() void MfVideoOutFile::SendFrame(const char *imgIn, unsigned imgLen, const char *pxFmt, int width, int height) { - + + if(this->pSinkWriter == NULL) + this->OpenFile(); + + FILETIME timeNow = GetTimeNow(); + if(TimeIsZero(this->startVideoTime)) + { + this->startVideoTime = timeNow; + } + + //Time since video start + unsigned long elapseSec = 0; + unsigned long elapseUSec = 0; + if(this->forceFrameRateFps > 0) + { + //Fixed frame rate + elapseSec = (unsigned long)(this->rtStart / 1e7); + elapseUSec = (unsigned long)((this->rtStart - elapseSec * 1e7)/10. 
+ 0.5); + this->rtStart += this->rtDuration; + } + else + { + //Real time frames + double elapse = SubtractTimes(timeNow, this->startVideoTime); + elapseSec = (unsigned long)elapse; + elapseUSec = (unsigned long)(((elapse - (double)elapseSec) / (double)1e6) + 0.5); + } + + //Add frame to output buffer + class FrameMetaData tmp; + this->outBufferMeta.push_back(tmp); + class FrameMetaData &meta = this->outBufferMeta[this->outBufferMeta.size()-1]; + meta.fmt = pxFmt; + meta.width = width; + meta.height = height; + meta.buffLen = imgLen; + meta.tv_sec = elapseSec; + meta.tv_usec = elapseUSec; + std::string img(imgIn, imgLen); + this->outBuffer.push_back(img); + + this->CopyFromBufferToOutFile(0); +} + +void MfVideoOutFile::CopyFromBufferToOutFile(int lastFrame) +{ + if(this->outBuffer.size() < 2 && !lastFrame) + return; + if(this->outBuffer.size() == 0) + return; + + std::string &frame = this->outBuffer[0]; + class FrameMetaData &meta = this->outBufferMeta[0]; + class FrameMetaData *metaNext = NULL; + if(this->outBuffer.size() >= 2) + metaNext = &this->outBufferMeta[1]; + IMFSample *pSample = NULL; IMFMediaBuffer *pBuffer = NULL; const LONG cbWidth = BYTES_PER_TUPLE * this->outputWidth; const DWORD cbBuffer = cbWidth * this->outputHeight; - if(this->pSinkWriter == NULL) - this->OpenFile(); - BYTE *pData = NULL; // Create a new memory buffer. @@ -195,13 +280,13 @@ void MfVideoOutFile::SendFrame(const char *imgIn, unsigned imgLen, const char *p } if (SUCCEEDED(hr)) { - if(strcmp(this->pxFmt.c_str(), pxFmt)!=0) + if(strcmp(this->pxFmt.c_str(), meta.fmt.c_str())!=0) { //std::cout << (long) pData << std::endl; unsigned int outBuffLen = cbBuffer; - DecodeAndResizeFrame((const unsigned char *)imgIn, imgLen, pxFmt, - width, height, + DecodeAndResizeFrame((const unsigned char *)frame.c_str(), frame.size(), meta.fmt.c_str(), + meta.width, meta.height, this->pxFmt.c_str(), (unsigned char **)&pData, &outBuffLen, @@ -211,9 +296,9 @@ void MfVideoOutFile::SendFrame(const char *imgIn, unsigned imgLen, const char *p } else { - DWORD cpyLen = imgLen; + DWORD cpyLen = frame.size(); if(cbBuffer < cpyLen) cpyLen = cbBuffer; - memcpy(pData, imgIn, cpyLen); + memcpy(pData, frame.c_str(), cpyLen); } } if (pBuffer) @@ -238,13 +323,26 @@ void MfVideoOutFile::SendFrame(const char *imgIn, unsigned imgLen, const char *p } // Set the time stamp and the duration. + LONGLONG frameTime = (LONGLONG)meta.tv_sec * (LONGLONG)1e7 + (LONGLONG)meta.tv_usec * 10; + LONGLONG duration = 0; + if(metaNext!=NULL) + { + LONGLONG frameTimeNext = (LONGLONG)metaNext->tv_sec * (LONGLONG)1e7 + (LONGLONG)metaNext->tv_usec * 10; + duration = frameTimeNext - frameTime; + } + else + { + duration = this->prevFrameDuration; + if(duration == 0) duration = 1e7; //Avoid zero duration frames + } + if (SUCCEEDED(hr)) { - hr = pSample->SetSampleTime(this->rtStart); + hr = pSample->SetSampleTime(frameTime); } if (SUCCEEDED(hr)) { - hr = pSample->SetSampleDuration(this->rtDuration); + hr = pSample->SetSampleDuration(duration); } // Send the sample to the Sink Writer. 
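The buffering above reduces to one piece of arithmetic: each frame's capture time is expressed in the 100-nanosecond units that IMFSample::SetSampleTime and SetSampleDuration expect, and a frame's duration is the gap to the next buffered frame. A compact sketch of that conversion follows; FrameStamp and the function names are illustrative, not part of the patch.

// One frame's capture time relative to the start of the video.
struct FrameStamp
{
	unsigned long sec;   // whole seconds
	unsigned long usec;  // microseconds within that second
};

// Media Foundation sample times are in 100 ns units: 1 s = 1e7, 1 us = 10.
static long long ToHns(const FrameStamp &t)
{
	return (long long)t.sec * 10000000LL + (long long)t.usec * 10LL;
}

// Duration of frame i is the gap to the next buffered frame; the last frame
// reuses the previous duration, falling back to one second if none is known.
static long long FrameDurationHns(const FrameStamp *stamps, int count, int i,
	long long prevDurationHns)
{
	if (i + 1 < count)
		return ToHns(stamps[i + 1]) - ToHns(stamps[i]);
	if (prevDurationHns != 0)
		return prevDurationHns;
	return 10000000LL;  // avoid zero-length samples
}

In the fixed-rate branch the stamp simply advances by rtDuration per frame; in the real-time branch the elapsed wall-clock time is split into whole seconds plus the fractional part scaled by 1e6 to give microseconds, which then feeds the same conversion.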
@@ -253,14 +351,17 @@ void MfVideoOutFile::SendFrame(const char *imgIn, unsigned imgLen, const char *p hr = this->pSinkWriter->WriteSample(streamIndex, pSample); } - this->rtStart += this->rtDuration; - SafeRelease(&pSample); SafeRelease(&pBuffer); + + this->outBuffer.erase(this->outBuffer.begin()); + this->outBufferMeta.erase(this->outBufferMeta.begin()); + this->prevFrameDuration = duration; } void MfVideoOutFile::Stop() { + this->CloseFile(); } diff --git a/mfvideooutfile.h b/mfvideooutfile.h index cf3476f..5760eeb 100644 --- a/mfvideooutfile.h +++ b/mfvideooutfile.h @@ -25,6 +25,7 @@ class MfVideoOutFile : public Base_Video_Out virtual void SetOutputPxFmt(const char *fmt); virtual void SetFrameRate(UINT32 frameRateIn); + void MfVideoOutFile::CopyFromBufferToOutFile(int lastFrame = 0); void Run(); protected: @@ -37,6 +38,10 @@ class MfVideoOutFile : public Base_Video_Out int outputWidth, outputHeight; UINT32 bitRate, forceFrameRateFps; + FILETIME startVideoTime; + std::vector outBufferMeta; + std::vector outBuffer; + LONGLONG prevFrameDuration; }; void *MfVideoOut_File_Worker_thread(void *arg); From 052abfe6be7ba36343eb3013382fb0c418599b1c Mon Sep 17 00:00:00 2001 From: TimSC Date: Tue, 17 Dec 2013 16:34:59 +0000 Subject: [PATCH 196/256] Trap and return error in video writer --- mfvideooutfile.cpp | 34 +++++++++++++++++++++++++++++++--- videooutfile.cpp | 22 +++++++++++++++++++--- 2 files changed, 50 insertions(+), 6 deletions(-) diff --git a/mfvideooutfile.cpp b/mfvideooutfile.cpp index 2b3de45..a0e2088 100644 --- a/mfvideooutfile.cpp +++ b/mfvideooutfile.cpp @@ -101,94 +101,123 @@ void MfVideoOutFile::OpenFile() IMFMediaType *pMediaTypeOut = NULL; IMFMediaType *pMediaTypeIn = NULL; this->rtDuration = 1; + std::string errMsg; if(this->forceFrameRateFps > 0) MFFrameRateToAverageTimePerFrame(this->forceFrameRateFps, 1, &this->rtDuration); HRESULT hr = MFCreateSinkWriterFromURL(this->fina.c_str(), NULL, NULL, &pSinkWriter); + if (!SUCCEEDED(hr)) + { + errMsg = "MFCreateSinkWriterFromURL failed"; + } // Set the output media type. 
if (SUCCEEDED(hr)) { - hr = MFCreateMediaType(&pMediaTypeOut); + hr = MFCreateMediaType(&pMediaTypeOut); + if (!SUCCEEDED(hr)) errMsg = "MFCreateMediaType failed"; } if (SUCCEEDED(hr)) { hr = pMediaTypeOut->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video); + if (!SUCCEEDED(hr)) errMsg = "SetGUID MF_MT_MAJOR_TYPE failed"; } if (SUCCEEDED(hr)) { - hr = pMediaTypeOut->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_WMV3); + //hr = pMediaTypeOut->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_WMV3); + hr = pMediaTypeOut->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_H264); + if (!SUCCEEDED(hr)) errMsg = "SetGUID MF_MT_SUBTYPE failed"; } if (SUCCEEDED(hr)) { hr = pMediaTypeOut->SetUINT32(MF_MT_AVG_BITRATE, this->bitRate); + if (!SUCCEEDED(hr)) errMsg = "Set MF_MT_AVG_BITRATE failed"; } if (SUCCEEDED(hr)) { hr = pMediaTypeOut->SetUINT32(MF_MT_INTERLACE_MODE, MFVideoInterlace_Progressive); + if (!SUCCEEDED(hr)) errMsg = "Set MF_MT_INTERLACE_MODE failed"; } if (SUCCEEDED(hr)) { hr = MFSetAttributeSize(pMediaTypeOut, MF_MT_FRAME_SIZE, this->outputWidth, this->outputHeight); + if (!SUCCEEDED(hr)) errMsg = "Set MF_MT_FRAME_SIZE failed"; } if (SUCCEEDED(hr) && this->forceFrameRateFps > 0) { hr = MFSetAttributeRatio(pMediaTypeOut, MF_MT_FRAME_RATE, this->forceFrameRateFps, 1); + if (!SUCCEEDED(hr)) errMsg = "Set MF_MT_FRAME_RATE failed"; } if (SUCCEEDED(hr)) { hr = MFSetAttributeRatio(pMediaTypeOut, MF_MT_PIXEL_ASPECT_RATIO, 1, 1); + if (!SUCCEEDED(hr)) errMsg = "Set MF_MT_PIXEL_ASPECT_RATIO failed"; } if (SUCCEEDED(hr)) { hr = pSinkWriter->AddStream(pMediaTypeOut, &streamIndex); + if (!SUCCEEDED(hr)) errMsg = "Set AddStream failed"; } // Set the input media type. if (SUCCEEDED(hr)) { hr = MFCreateMediaType(&pMediaTypeIn); + if (!SUCCEEDED(hr)) errMsg = "Set MFCreateMediaType failed"; } if (SUCCEEDED(hr)) { hr = pMediaTypeIn->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video); + if (!SUCCEEDED(hr)) errMsg = "Set MF_MT_MAJOR_TYPE failed"; } if (SUCCEEDED(hr)) { hr = pMediaTypeIn->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_RGB24); + if (!SUCCEEDED(hr)) errMsg = "Set MF_MT_SUBTYPE failed"; } if (SUCCEEDED(hr)) { hr = pMediaTypeIn->SetUINT32(MF_MT_INTERLACE_MODE, MFVideoInterlace_Progressive); + if (!SUCCEEDED(hr)) errMsg = "Set MF_MT_INTERLACE_MODE failed"; } if (SUCCEEDED(hr)) { hr = MFSetAttributeSize(pMediaTypeIn, MF_MT_FRAME_SIZE, this->outputWidth, this->outputHeight); + if (!SUCCEEDED(hr)) errMsg = "Set MF_MT_FRAME_SIZE failed"; } if (SUCCEEDED(hr) && this->forceFrameRateFps > 0) { hr = MFSetAttributeRatio(pMediaTypeIn, MF_MT_FRAME_RATE, this->forceFrameRateFps, 1); + if (!SUCCEEDED(hr)) errMsg = "Set MF_MT_FRAME_RATE failed"; } if (SUCCEEDED(hr)) { hr = MFSetAttributeRatio(pMediaTypeIn, MF_MT_PIXEL_ASPECT_RATIO, 1, 1); + if (!SUCCEEDED(hr)) errMsg = "Set MF_MT_PIXEL_ASPECT_RATIO failed"; } if (SUCCEEDED(hr)) { hr = pSinkWriter->SetInputMediaType(streamIndex, pMediaTypeIn, NULL); + if (!SUCCEEDED(hr)) errMsg = "SetInputMediaType failed"; } // Tell the sink writer to start accepting data. 
if (SUCCEEDED(hr)) { hr = pSinkWriter->BeginWriting(); + if (!SUCCEEDED(hr)) errMsg = "BeginWriting failed"; } SafeRelease(&pMediaTypeOut); SafeRelease(&pMediaTypeIn); + + if(errMsg.size() > 0) + { + throw runtime_error(errMsg); + } return; } @@ -205,7 +234,6 @@ void MfVideoOutFile::CloseFile() void MfVideoOutFile::SendFrame(const char *imgIn, unsigned imgLen, const char *pxFmt, int width, int height) { - if(this->pSinkWriter == NULL) this->OpenFile(); diff --git a/videooutfile.cpp b/videooutfile.cpp index 438f6da..63de585 100644 --- a/videooutfile.cpp +++ b/videooutfile.cpp @@ -44,18 +44,26 @@ PyObject *Video_out_file_manager_open(Video_out_file_manager *self, PyObject *ar if(!PyArg_ParseTuple(args, "ssii", &devarg, &pxFmtIn, &widthIn, &heightIn)) { - std::cout << "err" << std::endl; PyErr_Format(PyExc_RuntimeError, "Incorrect arguments to function."); Py_RETURN_NONE; } //Create worker thread pthread_t thread; + MfVideoOutFile *threadArgs = NULL; #ifdef _POSIX //TODO #endif #ifdef _NT - MfVideoOutFile *threadArgs = new MfVideoOutFile(devarg); + try + { + threadArgs = new MfVideoOutFile(devarg); + } + catch(std::exception &err) + { + PyErr_Format(PyExc_RuntimeError, err.what()); + Py_RETURN_NONE; + } #endif #ifdef _NT //TODO Remove ifdef when POSIX approah is established @@ -112,7 +120,15 @@ PyObject *Video_out_file_manager_Send_frame(Video_out_file_manager *self, PyObje if(it != self->threads->end()) { - it->second->SendFrame(imgIn, imgLen, pxFmtIn, widthIn, heightIn); + try + { + it->second->SendFrame(imgIn, imgLen, pxFmtIn, widthIn, heightIn); + } + catch (std::exception &err) + { + PyErr_Format(PyExc_RuntimeError, err.what()); + Py_RETURN_NONE; + } } else { From df8564fe66bf4cac2372ab716fa6923276248363 Mon Sep 17 00:00:00 2001 From: TimSC Date: Tue, 17 Dec 2013 17:18:37 +0000 Subject: [PATCH 197/256] Print more error info with hr result --- mfvideooutfile.cpp | 47 ++++++++++++++++++++++++++++++++++++++++++++-- 1 file changed, 45 insertions(+), 2 deletions(-) diff --git a/mfvideooutfile.cpp b/mfvideooutfile.cpp index a0e2088..300ae5b 100644 --- a/mfvideooutfile.cpp +++ b/mfvideooutfile.cpp @@ -5,6 +5,7 @@ #include #include #include +#include using namespace std; template void SafeRelease(T **ppT) @@ -16,6 +17,24 @@ template void SafeRelease(T **ppT) } } +std::string HrCodeToStdString(HRESULT hr) +{ + std::string out; + _com_error err(hr); + LPCTSTR hrErrMsg = err.ErrorMessage(); + +#ifdef UNICODE + size_t errsize = wcstombs(NULL, hrErrMsg, 0); + char* tmpStr = new char[errsize + 1]; + wcstombs(tmpStr, hrErrMsg, errsize + 1 ); + out = hrErrMsg; + delete tmpStr; +#else + out = hrErrMsg; +#endif + return out; +} + FILETIME GetTimeNow() { SYSTEMTIME systime; @@ -158,7 +177,28 @@ void MfVideoOutFile::OpenFile() if (SUCCEEDED(hr)) { hr = pSinkWriter->AddStream(pMediaTypeOut, &streamIndex); - if (!SUCCEEDED(hr)) errMsg = "Set AddStream failed"; + if (!SUCCEEDED(hr)) errMsg = "AddStream failed"; + } + + // Get supported types of output + IMFTransform *transform = NULL; + if (SUCCEEDED(hr)) + { + hr = pSinkWriter->GetServiceForStream(streamIndex, GUID_NULL, IID_IMFTransform, (LPVOID*)&transform); + if (!SUCCEEDED(hr)) + { + errMsg = "GetServiceForStream failed: "; + std::string hrErrStr = HrCodeToStdString(hr); + errMsg += hrErrStr; + } + } + + if (SUCCEEDED(hr) && transform != NULL) + { + IMFMediaType *fmtType; + hr = transform->GetInputAvailableType(streamIndex, 0, &fmtType); + std::cout << SUCCEEDED(hr) << "," << (LONG)fmtType << std::endl; + if (!SUCCEEDED(hr)) errMsg = 
"GetInputAvailableType failed"; } // Set the input media type. @@ -202,6 +242,9 @@ void MfVideoOutFile::OpenFile() { hr = pSinkWriter->SetInputMediaType(streamIndex, pMediaTypeIn, NULL); if (!SUCCEEDED(hr)) errMsg = "SetInputMediaType failed"; + if(hr == MF_E_INVALIDMEDIATYPE) errMsg.append(": MF_E_INVALIDMEDIATYPE"); + if(hr == MF_E_INVALIDSTREAMNUMBER) errMsg.append(": MF_E_INVALIDSTREAMNUMBER"); + if(hr == MF_E_TOPO_CODEC_NOT_FOUND) errMsg.append(": MF_E_TOPO_CODEC_NOT_FOUND"); } // Tell the sink writer to start accepting data. @@ -361,7 +404,7 @@ void MfVideoOutFile::CopyFromBufferToOutFile(int lastFrame) else { duration = this->prevFrameDuration; - if(duration == 0) duration = 1e7; //Avoid zero duration frames + if(duration == 0) duration = (LONGLONG)1e7; //Avoid zero duration frames } if (SUCCEEDED(hr)) From f31fd1bf3ced209ab6618169092e79ea412f3101 Mon Sep 17 00:00:00 2001 From: TimSC Date: Tue, 17 Dec 2013 18:09:39 +0000 Subject: [PATCH 198/256] Progress on encoding h264 but rate rate must be set --- libvideolive.cpp | 2 +- mfvideooutfile.cpp | 32 +++++++++++++++++++++++++------- mfvideooutfile.h | 2 ++ videooutfile.py | 2 +- 4 files changed, 29 insertions(+), 9 deletions(-) diff --git a/libvideolive.cpp b/libvideolive.cpp index 24b317f..fe956f3 100644 --- a/libvideolive.cpp +++ b/libvideolive.cpp @@ -138,7 +138,7 @@ static PyMethodDef Video_out_file_manager_methods[] = { "Close video device. Subsequent calls to other methods will fail."}, {"set_frame_rate", (PyCFunction)Video_out_file_manager_Set_Frame_Rate, METH_VARARGS, "set_frame_rate(filename = 'out.wmv', frame_rate)\n\n" - "Set output frame rate. Use frame rate of 0 for real time frame processing."}, + "Set output frame rate."}, {NULL} }; diff --git a/mfvideooutfile.cpp b/mfvideooutfile.cpp index 300ae5b..5949d2e 100644 --- a/mfvideooutfile.cpp +++ b/mfvideooutfile.cpp @@ -92,6 +92,7 @@ MfVideoOutFile::MfVideoOutFile(const char *fiName) : Base_Video_Out() this->streamIndex = 0; this->rtStart = 0; this->pxFmt = "BGR24"; + this->videoCodec = "WMV3"; this->outputWidth = 640; this->outputHeight = 480; @@ -143,8 +144,10 @@ void MfVideoOutFile::OpenFile() } if (SUCCEEDED(hr)) { - //hr = pMediaTypeOut->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_WMV3); - hr = pMediaTypeOut->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_H264); + if(strcmp(this->videoCodec.c_str(), "WMV3")==0) + hr = pMediaTypeOut->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_WMV3); + if(strcmp(this->videoCodec.c_str(), "H264")==0) + hr = pMediaTypeOut->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_H264); if (!SUCCEEDED(hr)) errMsg = "SetGUID MF_MT_SUBTYPE failed"; } if (SUCCEEDED(hr)) @@ -163,7 +166,7 @@ void MfVideoOutFile::OpenFile() hr = MFSetAttributeSize(pMediaTypeOut, MF_MT_FRAME_SIZE, this->outputWidth, this->outputHeight); if (!SUCCEEDED(hr)) errMsg = "Set MF_MT_FRAME_SIZE failed"; } - if (SUCCEEDED(hr) && this->forceFrameRateFps > 0) + if (SUCCEEDED(hr)) { hr = MFSetAttributeRatio(pMediaTypeOut, MF_MT_FRAME_RATE, this->forceFrameRateFps, 1); if (!SUCCEEDED(hr)) errMsg = "Set MF_MT_FRAME_RATE failed"; @@ -181,7 +184,7 @@ void MfVideoOutFile::OpenFile() } // Get supported types of output - IMFTransform *transform = NULL; + /*IMFTransform *transform = NULL; if (SUCCEEDED(hr)) { hr = pSinkWriter->GetServiceForStream(streamIndex, GUID_NULL, IID_IMFTransform, (LPVOID*)&transform); @@ -199,7 +202,7 @@ void MfVideoOutFile::OpenFile() hr = transform->GetInputAvailableType(streamIndex, 0, &fmtType); std::cout << SUCCEEDED(hr) << "," << (LONG)fmtType << std::endl; if (!SUCCEEDED(hr)) errMsg = 
"GetInputAvailableType failed"; - } + }*/ // Set the input media type. if (SUCCEEDED(hr)) @@ -215,7 +218,10 @@ void MfVideoOutFile::OpenFile() if (SUCCEEDED(hr)) { - hr = pMediaTypeIn->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_RGB24); + if(strcmp(this->pxFmt.c_str(), "BGR24")==0) + hr = pMediaTypeIn->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_RGB24); + if(strcmp(this->pxFmt.c_str(), "YUY2")==0) //Supported by H264 + hr = pMediaTypeIn->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_YUY2); if (!SUCCEEDED(hr)) errMsg = "Set MF_MT_SUBTYPE failed"; } if (SUCCEEDED(hr)) @@ -228,7 +234,7 @@ void MfVideoOutFile::OpenFile() hr = MFSetAttributeSize(pMediaTypeIn, MF_MT_FRAME_SIZE, this->outputWidth, this->outputHeight); if (!SUCCEEDED(hr)) errMsg = "Set MF_MT_FRAME_SIZE failed"; } - if (SUCCEEDED(hr) && this->forceFrameRateFps > 0) + if (SUCCEEDED(hr)) { hr = MFSetAttributeRatio(pMediaTypeIn, MF_MT_FRAME_RATE, this->forceFrameRateFps, 1); if (!SUCCEEDED(hr)) errMsg = "Set MF_MT_FRAME_RATE failed"; @@ -471,6 +477,18 @@ void MfVideoOutFile::SetFrameRate(UINT32 frameRateIn) this->forceFrameRateFps = frameRateIn; } +void MfVideoOutFile::SetVideoCodec(const char *codec, UINT32 bitrateIn) +{ + if(this->pSinkWriter != NULL) + { + throw std::runtime_error("Set video parameters before opening video file"); + } + if(codec!=NULL) + this->videoCodec = codec; + if(bitrateIn > 0) + this->bitRate = bitrateIn; +} + void MfVideoOutFile::Run() { diff --git a/mfvideooutfile.h b/mfvideooutfile.h index 5760eeb..61bd18f 100644 --- a/mfvideooutfile.h +++ b/mfvideooutfile.h @@ -24,6 +24,7 @@ class MfVideoOutFile : public Base_Video_Out virtual void SetOutputSize(int width, int height); virtual void SetOutputPxFmt(const char *fmt); virtual void SetFrameRate(UINT32 frameRateIn); + virtual void SetVideoCodec(const char *codec, UINT32 bitrate); void MfVideoOutFile::CopyFromBufferToOutFile(int lastFrame = 0); void Run(); @@ -34,6 +35,7 @@ class MfVideoOutFile : public Base_Video_Out LONGLONG rtStart; UINT64 rtDuration; std::string pxFmt; + std::string videoCodec; std::wstring fina; int outputWidth, outputHeight; diff --git a/videooutfile.py b/videooutfile.py index 7896c3b..4b31582 100644 --- a/videooutfile.py +++ b/videooutfile.py @@ -13,7 +13,7 @@ h = lena.shape[0] outManager.open("test.wmv", "BGR24", 640, 480) - outManager.set_frame_rate("test.wmv", 5) + outManager.set_frame_rate("test.wmv", 10) imgLen = w * h * 3 #img = np.ones(shape=(imgLen,), dtype=np.uint8) * 0 #for i in range(imgLen): From 08e0cd1b6efcd137815ed51d4294010e9756be7b Mon Sep 17 00:00:00 2001 From: TimSC Date: Tue, 17 Dec 2013 18:56:41 +0000 Subject: [PATCH 199/256] Add method to set real time frame encoding --- base.h | 2 ++ libvideolive.cpp | 6 ++++ mfvideooutfile.cpp | 23 +++++++++----- mfvideooutfile.h | 4 ++- videooutfile.cpp | 76 ++++++++++++++++++++++++++++++++++++++++++++++ videooutfile.h | 2 ++ videooutfile.py | 6 ++-- 7 files changed, 109 insertions(+), 10 deletions(-) diff --git a/base.h b/base.h index 5c83132..f2ee0ea 100644 --- a/base.h +++ b/base.h @@ -76,6 +76,8 @@ class Base_Video_Out virtual void SetOutputSize(int width, int height) {}; virtual void SetOutputPxFmt(const char *fmt) {}; virtual void SetFrameRate(UINT32 frameRateIn) {}; + virtual void SetVideoCodec(const char *codec, UINT32 bitrate) {}; + virtual void EnableRealTimeFrameRate(int varEnable) {}; void Run() {}; }; diff --git a/libvideolive.cpp b/libvideolive.cpp index fe956f3..a7e20de 100644 --- a/libvideolive.cpp +++ b/libvideolive.cpp @@ -139,6 +139,12 @@ static PyMethodDef 
Video_out_file_manager_methods[] = { {"set_frame_rate", (PyCFunction)Video_out_file_manager_Set_Frame_Rate, METH_VARARGS, "set_frame_rate(filename = 'out.wmv', frame_rate)\n\n" "Set output frame rate."}, + {"set_video_codec", (PyCFunction)Video_out_file_manager_Set_Video_Codec, METH_VARARGS, + "set_video_codec(filename = 'out.wmv', codec = 'H264', bitrate)\n\n" + "Set output video codec."}, + {"enable_real_time_frame_rate", (PyCFunction)Video_out_file_manager_Enable_Real_Time_Frame_Rate, METH_VARARGS, + "enable_real_time_frame_rate(filename = 'out.wmv', enable)\n\n" + "Set real time frame encoding."}, {NULL} }; diff --git a/mfvideooutfile.cpp b/mfvideooutfile.cpp index 5949d2e..8cd4087 100644 --- a/mfvideooutfile.cpp +++ b/mfvideooutfile.cpp @@ -98,7 +98,7 @@ MfVideoOutFile::MfVideoOutFile(const char *fiName) : Base_Video_Out() this->outputHeight = 480; this->bitRate = 800000; this->fina = CStringToWString(fiName); - this->forceFrameRateFps = 0; + this->frameRateFps = 0; this->prevFrameDuration = 0; SetTimeToZero(this->startVideoTime); } @@ -122,8 +122,8 @@ void MfVideoOutFile::OpenFile() IMFMediaType *pMediaTypeIn = NULL; this->rtDuration = 1; std::string errMsg; - if(this->forceFrameRateFps > 0) - MFFrameRateToAverageTimePerFrame(this->forceFrameRateFps, 1, &this->rtDuration); + if(!this->variableFrameRateEnabled) + MFFrameRateToAverageTimePerFrame(this->frameRateFps, 1, &this->rtDuration); HRESULT hr = MFCreateSinkWriterFromURL(this->fina.c_str(), NULL, NULL, &pSinkWriter); if (!SUCCEEDED(hr)) @@ -168,7 +168,7 @@ void MfVideoOutFile::OpenFile() } if (SUCCEEDED(hr)) { - hr = MFSetAttributeRatio(pMediaTypeOut, MF_MT_FRAME_RATE, this->forceFrameRateFps, 1); + hr = MFSetAttributeRatio(pMediaTypeOut, MF_MT_FRAME_RATE, this->frameRateFps, 1); if (!SUCCEEDED(hr)) errMsg = "Set MF_MT_FRAME_RATE failed"; } if (SUCCEEDED(hr)) @@ -236,7 +236,7 @@ void MfVideoOutFile::OpenFile() } if (SUCCEEDED(hr)) { - hr = MFSetAttributeRatio(pMediaTypeIn, MF_MT_FRAME_RATE, this->forceFrameRateFps, 1); + hr = MFSetAttributeRatio(pMediaTypeIn, MF_MT_FRAME_RATE, this->frameRateFps, 1); if (!SUCCEEDED(hr)) errMsg = "Set MF_MT_FRAME_RATE failed"; } if (SUCCEEDED(hr)) @@ -295,7 +295,7 @@ void MfVideoOutFile::SendFrame(const char *imgIn, unsigned imgLen, const char *p //Time since video start unsigned long elapseSec = 0; unsigned long elapseUSec = 0; - if(this->forceFrameRateFps > 0) + if(!this->variableFrameRateEnabled) { //Fixed frame rate elapseSec = (unsigned long)(this->rtStart / 1e7); @@ -474,7 +474,7 @@ void MfVideoOutFile::SetFrameRate(UINT32 frameRateIn) { throw std::runtime_error("Set video parameters before opening video file"); } - this->forceFrameRateFps = frameRateIn; + this->frameRateFps = frameRateIn; } void MfVideoOutFile::SetVideoCodec(const char *codec, UINT32 bitrateIn) @@ -489,6 +489,15 @@ void MfVideoOutFile::SetVideoCodec(const char *codec, UINT32 bitrateIn) this->bitRate = bitrateIn; } +void MfVideoOutFile::EnableRealTimeFrameRate(int varEnable) +{ + if(this->pSinkWriter != NULL) + { + throw std::runtime_error("Set video parameters before opening video file"); + } + this->variableFrameRateEnabled = varEnable; +} + void MfVideoOutFile::Run() { diff --git a/mfvideooutfile.h b/mfvideooutfile.h index 61bd18f..b7cf630 100644 --- a/mfvideooutfile.h +++ b/mfvideooutfile.h @@ -25,6 +25,7 @@ class MfVideoOutFile : public Base_Video_Out virtual void SetOutputPxFmt(const char *fmt); virtual void SetFrameRate(UINT32 frameRateIn); virtual void SetVideoCodec(const char *codec, UINT32 bitrate); + virtual void 
EnableRealTimeFrameRate(int varEnable); void MfVideoOutFile::CopyFromBufferToOutFile(int lastFrame = 0); void Run(); @@ -37,9 +38,10 @@ class MfVideoOutFile : public Base_Video_Out std::string pxFmt; std::string videoCodec; std::wstring fina; + int variableFrameRateEnabled; int outputWidth, outputHeight; - UINT32 bitRate, forceFrameRateFps; + UINT32 bitRate, frameRateFps; FILETIME startVideoTime; std::vector outBufferMeta; std::vector outBuffer; diff --git a/videooutfile.cpp b/videooutfile.cpp index 63de585..891d8da 100644 --- a/videooutfile.cpp +++ b/videooutfile.cpp @@ -192,3 +192,79 @@ PyObject *Video_out_file_manager_Set_Frame_Rate(Video_out_file_manager *self, Py Py_RETURN_NONE; } + +PyObject *Video_out_file_manager_Set_Video_Codec(Video_out_file_manager *self, PyObject *args) +{ + //Process arguments + const char *devarg = NULL; + char *videoCodec = NULL; + int bitRate = 0; + + if(PyObject_Length(args) < 2) + { + PyErr_Format(PyExc_RuntimeError, "Too few arguments."); + Py_RETURN_NONE; + } + + PyObject *pydev = PyTuple_GetItem(args, 0); + devarg = PyString_AsString(pydev); + + PyObject *pyVideoCodec = PyTuple_GetItem(args, 1); + if(pyVideoCodec != Py_None) + videoCodec = PyString_AsString(pyVideoCodec); + else + videoCodec = NULL; + + if(PyObject_Length(args) >= 3) + { + PyObject *pyBitRate = PyTuple_GetItem(args, 2); + bitRate = PyInt_AsLong(pyBitRate); + } + + std::map::iterator it = self->threads->find(devarg); + + if(it != self->threads->end()) + { + it->second->SetVideoCodec(videoCodec, bitRate); + } + else + { + PyErr_Format(PyExc_RuntimeError, "Device not found."); + Py_RETURN_NONE; + } + + Py_RETURN_NONE; +} + +PyObject *Video_out_file_manager_Enable_Real_Time_Frame_Rate(Video_out_file_manager *self, PyObject *args) +{ + //Process arguments + const char *devarg = NULL; + int realTimeFrameRate = 0; + + if(PyObject_Length(args) < 2) + { + PyErr_Format(PyExc_RuntimeError, "Too few arguments."); + Py_RETURN_NONE; + } + + PyObject *pydev = PyTuple_GetItem(args, 0); + devarg = PyString_AsString(pydev); + + PyObject *pyRealTimeFrameRate = PyTuple_GetItem(args, 1); + realTimeFrameRate = PyInt_AsLong(pyRealTimeFrameRate); + + std::map::iterator it = self->threads->find(devarg); + + if(it != self->threads->end()) + { + it->second->EnableRealTimeFrameRate(realTimeFrameRate); + } + else + { + PyErr_Format(PyExc_RuntimeError, "Device not found."); + Py_RETURN_NONE; + } + + Py_RETURN_NONE; +} diff --git a/videooutfile.h b/videooutfile.h index 274f93d..19706cc 100644 --- a/videooutfile.h +++ b/videooutfile.h @@ -23,6 +23,8 @@ PyObject *Video_out_file_manager_open(Video_out_file_manager *self, PyObject *ar PyObject *Video_out_file_manager_Send_frame(Video_out_file_manager *self, PyObject *args); PyObject *Video_out_file_manager_close(Video_out_file_manager *self, PyObject *args); PyObject *Video_out_file_manager_Set_Frame_Rate(Video_out_file_manager *self, PyObject *args); +PyObject *Video_out_file_manager_Set_Video_Codec(Video_out_file_manager *self, PyObject *args); +PyObject *Video_out_file_manager_Enable_Real_Time_Frame_Rate(Video_out_file_manager *self, PyObject *args); #endif //VIDEOOUTFILE_H diff --git a/videooutfile.py b/videooutfile.py index 4b31582..dcac201 100644 --- a/videooutfile.py +++ b/videooutfile.py @@ -13,7 +13,9 @@ h = lena.shape[0] outManager.open("test.wmv", "BGR24", 640, 480) - outManager.set_frame_rate("test.wmv", 10) + outManager.set_frame_rate("test.wmv", 25) + outManager.enable_real_time_frame_rate("test.wmv", 1) + imgLen = w * h * 3 #img = np.ones(shape=(imgLen,), 
dtype=np.uint8) * 0 #for i in range(imgLen): @@ -31,5 +33,5 @@ print "Frame", frNum outManager.send_frame("test.wmv", str(lena.tostring()), "RGB24", w, h) - time.sleep(0.01) + time.sleep(frNum / 500.) From 7a499dc528ee6bf6b0fe2a6c31fcfd6409ce6fed Mon Sep 17 00:00:00 2001 From: TimSC Date: Wed, 18 Dec 2013 02:44:58 +0000 Subject: [PATCH 200/256] H264 greyscale sort of working --- mfvideooutfile.cpp | 36 ++++++++++++++++------- pixfmt.cpp | 73 ++++++++++++++++++++++++++++++++++++++++++++-- videoout.cpp | 10 ++++++- videooutfile.cpp | 52 ++++++++++++++++++++++++++++++--- videooutfile.py | 12 +++++--- 5 files changed, 160 insertions(+), 23 deletions(-) diff --git a/mfvideooutfile.cpp b/mfvideooutfile.cpp index 8cd4087..77de4d9 100644 --- a/mfvideooutfile.cpp +++ b/mfvideooutfile.cpp @@ -76,8 +76,6 @@ std::wstring CStringToWString(const char *inStr) return tmpDevName2; } -const UINT32 BYTES_PER_TUPLE = 3; - MfVideoOutFile::MfVideoOutFile(const char *fiName) : Base_Video_Out() { HRESULT hr = MFStartup(MF_VERSION); @@ -91,8 +89,8 @@ MfVideoOutFile::MfVideoOutFile(const char *fiName) : Base_Video_Out() this->pSinkWriter = NULL; this->streamIndex = 0; this->rtStart = 0; - this->pxFmt = "BGR24"; - this->videoCodec = "WMV3"; + this->pxFmt = "YV12"; //"BGR24"; + this->videoCodec = "H264"; //"WMV3"; this->outputWidth = 640; this->outputHeight = 480; @@ -100,6 +98,7 @@ MfVideoOutFile::MfVideoOutFile(const char *fiName) : Base_Video_Out() this->fina = CStringToWString(fiName); this->frameRateFps = 0; this->prevFrameDuration = 0; + this->variableFrameRateEnabled = 0; SetTimeToZero(this->startVideoTime); } @@ -220,8 +219,10 @@ void MfVideoOutFile::OpenFile() { if(strcmp(this->pxFmt.c_str(), "BGR24")==0) hr = pMediaTypeIn->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_RGB24); - if(strcmp(this->pxFmt.c_str(), "YUY2")==0) //Supported by H264 - hr = pMediaTypeIn->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_YUY2); + if(strcmp(this->pxFmt.c_str(), "I420")==0) + hr = pMediaTypeIn->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_I420); + if(strcmp(this->pxFmt.c_str(), "YV12")==0) //Supported by H264 + hr = pMediaTypeIn->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_YV12); if (!SUCCEEDED(hr)) errMsg = "Set MF_MT_SUBTYPE failed"; } if (SUCCEEDED(hr)) @@ -341,9 +342,21 @@ void MfVideoOutFile::CopyFromBufferToOutFile(int lastFrame) IMFSample *pSample = NULL; IMFMediaBuffer *pBuffer = NULL; + DWORD cbBuffer = 0; + + if(strcmp(this->pxFmt.c_str(), "BGR24") == 0) + { + LONG cbWidth = 3 * this->outputWidth; + cbBuffer = cbWidth * this->outputHeight; + } + + if(strcmp(this->pxFmt.c_str(), "I420") == 0 || strcmp(this->pxFmt.c_str(), "YV12") == 0) + { + cbBuffer = 1.5 * this->outputHeight * this->outputWidth; + } - const LONG cbWidth = BYTES_PER_TUPLE * this->outputWidth; - const DWORD cbBuffer = cbWidth * this->outputHeight; + if(cbBuffer==0) + throw std::runtime_error("Unsupported pixel format"); BYTE *pData = NULL; @@ -360,7 +373,6 @@ void MfVideoOutFile::CopyFromBufferToOutFile(int lastFrame) if(strcmp(this->pxFmt.c_str(), meta.fmt.c_str())!=0) { //std::cout << (long) pData << std::endl; - unsigned int outBuffLen = cbBuffer; DecodeAndResizeFrame((const unsigned char *)frame.c_str(), frame.size(), meta.fmt.c_str(), meta.width, meta.height, @@ -463,8 +475,10 @@ void MfVideoOutFile::SetOutputPxFmt(const char *fmt) { throw std::runtime_error("Set video format before opening video file"); } - if(strcmp(fmt,"BGR24")!=0) - throw std::runtime_error("Only BGR24 is supported"); + if(strcmp(fmt,"BGR24")!=0 && strcmp(fmt,"I420")!=0 && strcmp(fmt,"YV12")!=0) + { + 
throw std::runtime_error("Only BGR24, YV12 and I420 is supported"); + } this->pxFmt = fmt; } diff --git a/pixfmt.cpp b/pixfmt.cpp index 5751e9f..2706cb0 100644 --- a/pixfmt.cpp +++ b/pixfmt.cpp @@ -323,7 +323,7 @@ int ReadJpegFile(unsigned char * inbuffer, // ************************************************************** -void ConvertRGBtoYUYVorSimilar(const unsigned char *im, unsigned sizeimage, +int ConvertRGBtoYUYVorSimilar(const unsigned char *im, unsigned sizeimage, unsigned width, unsigned height, const char *targetPxFmt, unsigned char **outIm, unsigned *outImSize) { @@ -405,7 +405,65 @@ void ConvertRGBtoYUYVorSimilar(const unsigned char *im, unsigned sizeimage, cursor += 4; } } + + return 1; +} + +// ********************************************************************* + +int ConvertRgb24ToI420orYV12(const unsigned char *im, unsigned dataLen, + int width, int height, + unsigned char **buffOut, + unsigned *buffOutLen, + const char *outPxFmt) +{ + //Create output buffer if required + int requiredSize = width * height * 1.5; + if(*buffOutLen != 0 && *buffOutLen != requiredSize) + throw std::runtime_error("Output buffer has incorrect size"); + *buffOutLen = requiredSize; + if(*buffOut == NULL) + *buffOut = new unsigned char [*buffOutLen]; + + memset(*buffOut, 128, *buffOutLen); + + unsigned uPlaneOffset = width * height; + unsigned vPlaneOffset = width * height * 1.25; + + for(int x = 0; x < width ; x+=2) + { + for(int y = 0; y < height; y+=2) + { + unsigned YOutOffset1 = width * y + x; + unsigned rgbInOffset1 = width * y * 3 + x * 3; + unsigned YOutOffset2 = width * y + (x+1); + unsigned rgbInOffset2 = width * y * 3 + (x+1) * 3; + unsigned YOutOffset3 = width * (y+1) + x; + unsigned rgbInOffset3 = width * (y+1) * 3 + x * 3; + unsigned YOutOffset4 = width * (y+1) + (x+1); + unsigned rgbInOffset4 = width * (y+1) * 3 + (x+1) * 3; + + unsigned Y1 = 66 * im[rgbInOffset1] + 129 * im[rgbInOffset1+1] + 25 * im[rgbInOffset1+2]; + unsigned Y2 = 66 * im[rgbInOffset2] + 129 * im[rgbInOffset1+2] + 25 * im[rgbInOffset2+2]; + unsigned Y3 = 66 * im[rgbInOffset3] + 129 * im[rgbInOffset1+3] + 25 * im[rgbInOffset3+2]; + unsigned Y4 = 66 * im[rgbInOffset4] + 129 * im[rgbInOffset1+4] + 25 * im[rgbInOffset4+2]; + + Y1 = ((Y1 + 128) >> 8) + 16; + Y2 = ((Y2 + 128) >> 8) + 16; + Y3 = ((Y3 + 128) >> 8) + 16; + Y4 = ((Y4 + 128) >> 8) + 16; + + (*buffOut)[YOutOffset1] = Y1; + (*buffOut)[YOutOffset2] = Y2; + (*buffOut)[YOutOffset3] = Y3; + (*buffOut)[YOutOffset4] = Y4; + + } + } + + return 1; } + // ********************************************************************* int DecodeFrame(const unsigned char *data, unsigned dataLen, @@ -507,15 +565,24 @@ int DecodeFrame(const unsigned char *data, unsigned dataLen, return 1; } + if(strcmp(inPxFmt,"RGB24")==0 && + (strcmp(targetPxFmt, "I420")==0 || strcmp(targetPxFmt, "YV12")==0)) + { + int ret = ConvertRgb24ToI420orYV12(data, dataLen, + width, height, + buffOut, buffOutLen, targetPxFmt); + return ret; + } + if(strcmp(inPxFmt,"RGB24")==0 && (strcmp(targetPxFmt, "YUYV")==0 || strcmp(targetPxFmt, "UYVY")==0) ) { - ConvertRGBtoYUYVorSimilar(data, dataLen, + int ret = ConvertRGBtoYUYVorSimilar(data, dataLen, width, height, targetPxFmt, buffOut, buffOutLen); - return 1; + return ret; } //RGB24 -> BGR24 diff --git a/videoout.cpp b/videoout.cpp index edda79b..96f84e1 100644 --- a/videoout.cpp +++ b/videoout.cpp @@ -110,7 +110,15 @@ PyObject *Video_out_manager_Send_frame(Video_out_manager *self, PyObject *args) if(it != self->threads->end()) { - it->second->SendFrame(imgIn, 
imgLen, pxFmtIn, widthIn, heightIn); + try + { + it->second->SendFrame(imgIn, imgLen, pxFmtIn, widthIn, heightIn); + } + catch(std::exception &err) + { + PyErr_Format(PyExc_RuntimeError, err.what()); + Py_RETURN_NONE; + } } else { diff --git a/videooutfile.cpp b/videooutfile.cpp index 891d8da..747b110 100644 --- a/videooutfile.cpp +++ b/videooutfile.cpp @@ -68,8 +68,16 @@ PyObject *Video_out_file_manager_open(Video_out_file_manager *self, PyObject *ar #ifdef _NT //TODO Remove ifdef when POSIX approah is established (*self->threads)[devarg] = threadArgs; - threadArgs->SetOutputSize(widthIn, heightIn); - threadArgs->SetOutputPxFmt(pxFmtIn); + try + { + threadArgs->SetOutputSize(widthIn, heightIn); + threadArgs->SetOutputPxFmt(pxFmtIn); + } + catch(std::exception &err) + { + PyErr_SetString(PyExc_RuntimeError, err.what()); + Py_RETURN_NONE; + } #endif #ifdef _POSIX @@ -84,7 +92,7 @@ PyObject *Video_out_file_manager_open(Video_out_file_manager *self, PyObject *ar PyObject *Video_out_file_manager_Send_frame(Video_out_file_manager *self, PyObject *args) { - //printf("Video_out_manager_Send_frame\n"); + printf("Video_out_manager_Send_frame\n"); //dev = '\\dev\\video0', img, pixel_format, width, height //Process arguments @@ -154,7 +162,16 @@ PyObject *Video_out_file_manager_close(Video_out_file_manager *self, PyObject *a if(it != self->threads->end()) { + try + { it->second->Stop(); + } + catch(std::exception &err) + { + PyErr_Format(PyExc_RuntimeError, err.what()); + Py_RETURN_NONE; + } + } Py_RETURN_NONE; @@ -182,7 +199,16 @@ PyObject *Video_out_file_manager_Set_Frame_Rate(Video_out_file_manager *self, Py if(it != self->threads->end()) { + try + { it->second->SetFrameRate(frameRate); + } + catch(std::exception &err) + { + PyErr_Format(PyExc_RuntimeError, err.what()); + Py_RETURN_NONE; + } + } else { @@ -225,7 +251,16 @@ PyObject *Video_out_file_manager_Set_Video_Codec(Video_out_file_manager *self, P if(it != self->threads->end()) { + try + { it->second->SetVideoCodec(videoCodec, bitRate); + } + catch(std::exception &err) + { + PyErr_Format(PyExc_RuntimeError, err.what()); + Py_RETURN_NONE; + } + } else { @@ -258,7 +293,16 @@ PyObject *Video_out_file_manager_Enable_Real_Time_Frame_Rate(Video_out_file_mana if(it != self->threads->end()) { - it->second->EnableRealTimeFrameRate(realTimeFrameRate); + try + { + it->second->EnableRealTimeFrameRate(realTimeFrameRate); + } + catch(std::exception &err) + { + PyErr_Format(PyExc_RuntimeError, err.what()); + Py_RETURN_NONE; + } + } else { diff --git a/videooutfile.py b/videooutfile.py index dcac201..8b2446f 100644 --- a/videooutfile.py +++ b/videooutfile.py @@ -12,9 +12,11 @@ w = lena.shape[1] h = lena.shape[0] - outManager.open("test.wmv", "BGR24", 640, 480) + realTimeFrames = 0 + + outManager.open("test.wmv", "YV12", 640, 480) outManager.set_frame_rate("test.wmv", 25) - outManager.enable_real_time_frame_rate("test.wmv", 1) + outManager.enable_real_time_frame_rate("test.wmv", realTimeFrames) imgLen = w * h * 3 #img = np.ones(shape=(imgLen,), dtype=np.uint8) * 0 @@ -33,5 +35,7 @@ print "Frame", frNum outManager.send_frame("test.wmv", str(lena.tostring()), "RGB24", w, h) - time.sleep(frNum / 500.) - + if realTimeFrames: + time.sleep(frNum / 500.) 
+	else:
+		time.sleep(0.01)

From 43881808c27b80abc858e92fe67b6c2208a30cff Mon Sep 17 00:00:00 2001
From: TimSC
Date: Wed, 18 Dec 2013 02:53:12 +0000
Subject: [PATCH 201/256] Fix formula typo

---
 pixfmt.cpp | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/pixfmt.cpp b/pixfmt.cpp
index 2706cb0..65adb67 100644
--- a/pixfmt.cpp
+++ b/pixfmt.cpp
@@ -444,9 +444,9 @@ int ConvertRgb24ToI420orYV12(const unsigned char *im, unsigned dataLen,
 		unsigned rgbInOffset4 = width * (y+1) * 3 + (x+1) * 3;

 		unsigned Y1 = 66 * im[rgbInOffset1] + 129 * im[rgbInOffset1+1] + 25 * im[rgbInOffset1+2];
-		unsigned Y2 = 66 * im[rgbInOffset2] + 129 * im[rgbInOffset1+2] + 25 * im[rgbInOffset2+2];
-		unsigned Y3 = 66 * im[rgbInOffset3] + 129 * im[rgbInOffset1+3] + 25 * im[rgbInOffset3+2];
-		unsigned Y4 = 66 * im[rgbInOffset4] + 129 * im[rgbInOffset1+4] + 25 * im[rgbInOffset4+2];
+		unsigned Y2 = 66 * im[rgbInOffset2] + 129 * im[rgbInOffset2+1] + 25 * im[rgbInOffset2+2];
+		unsigned Y3 = 66 * im[rgbInOffset3] + 129 * im[rgbInOffset3+1] + 25 * im[rgbInOffset3+2];
+		unsigned Y4 = 66 * im[rgbInOffset4] + 129 * im[rgbInOffset4+1] + 25 * im[rgbInOffset4+2];

 		Y1 = ((Y1 + 128) >> 8) + 16;
 		Y2 = ((Y2 + 128) >> 8) + 16;

From 8801b65a0c06cc85ab56078aa92a45be1eb5158f Mon Sep 17 00:00:00 2001
From: TimSC
Date: Wed, 18 Dec 2013 14:06:06 +0000
Subject: [PATCH 202/256] Colour I420 now working

---
 pixfmt.cpp | 42 +++++++++++++++++++++++++++++++++++++---
 1 file changed, 39 insertions(+), 3 deletions(-)

diff --git a/pixfmt.cpp b/pixfmt.cpp
index 65adb67..4ebcf15 100644
--- a/pixfmt.cpp
+++ b/pixfmt.cpp
@@ -425,10 +425,20 @@ int ConvertRgb24ToI420orYV12(const unsigned char *im, unsigned dataLen,
 	if(*buffOut == NULL)
 		*buffOut = new unsigned char [*buffOutLen];

-	memset(*buffOut, 128, *buffOutLen);
+	//memset(*buffOut, 128, *buffOutLen);
+	unsigned uPlaneOffset = 0;
+	unsigned vPlaneOffset = 0;

-	unsigned uPlaneOffset = width * height;
-	unsigned vPlaneOffset = width * height * 1.25;
+	if(strcmp(outPxFmt, "I420")==0)
+	{
+		uPlaneOffset = width * height;
+		vPlaneOffset = width * height * 1.25;
+	}
+	else
+	{
+		uPlaneOffset = width * height * 1.25;
+		vPlaneOffset = width * height;
+	}

 	for(int x = 0; x < width ; x+=2)
 	{
@@ -443,21 +453,47 @@ int ConvertRgb24ToI420orYV12(const unsigned char *im, unsigned dataLen,
 		unsigned YOutOffset4 = width * (y+1) + (x+1);
 		unsigned rgbInOffset4 = width * (y+1) * 3 + (x+1) * 3;

+		unsigned colOffset = (width/2) * (y/2) + (x/2);
+		unsigned UOutOffset = colOffset + uPlaneOffset;
+		unsigned VOutOffset = colOffset + vPlaneOffset;
+
 		unsigned Y1 = 66 * im[rgbInOffset1] + 129 * im[rgbInOffset1+1] + 25 * im[rgbInOffset1+2];
 		unsigned Y2 = 66 * im[rgbInOffset2] + 129 * im[rgbInOffset2+1] + 25 * im[rgbInOffset2+2];
 		unsigned Y3 = 66 * im[rgbInOffset3] + 129 * im[rgbInOffset3+1] + 25 * im[rgbInOffset3+2];
 		unsigned Y4 = 66 * im[rgbInOffset4] + 129 * im[rgbInOffset4+1] + 25 * im[rgbInOffset4+2];

+		unsigned U1 = -38 * im[rgbInOffset1] - 74 * im[rgbInOffset1+1] + 112 * im[rgbInOffset1+2];
+		unsigned U2 = -38 * im[rgbInOffset2] - 74 * im[rgbInOffset2+1] + 112 * im[rgbInOffset2+2];
+		unsigned U3 = -38 * im[rgbInOffset3] - 74 * im[rgbInOffset3+1] + 112 * im[rgbInOffset3+2];
+		unsigned U4 = -38 * im[rgbInOffset4] - 74 * im[rgbInOffset4+1] + 112 * im[rgbInOffset4+2];
+
+		unsigned V1 = 112 * im[rgbInOffset1] - 94 * im[rgbInOffset1+1] - 18 * im[rgbInOffset1+2];
+		unsigned V2 = 112 * im[rgbInOffset2] - 94 * im[rgbInOffset2+1] - 18 * im[rgbInOffset2+2];
+		unsigned V3 = 112 * im[rgbInOffset3] - 94 * im[rgbInOffset3+1] - 18
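The RGB24 to I420/YV12 conversion introduced and then corrected in the patches above uses the common BT.601 integer approximation: coefficients scaled by 256, 128 added for rounding, a right shift by 8, then the 16 (luma) or 128 (chroma) offset, with the four chroma samples of each 2x2 block averaged into a single U and V value. A small stand-alone check of those constants, written for this note rather than taken from the patches:

    # Python 2 sketch: compare the integer BT.601 form used in ConvertRgb24ToI420orYV12
    # with the usual floating point form, for one sample RGB value.
    r, g, b = 200, 120, 40

    y_int = ((66*r + 129*g + 25*b + 128) >> 8) + 16
    u_int = ((-38*r - 74*g + 112*b + 128) >> 8) + 128
    v_int = ((112*r - 94*g - 18*b + 128) >> 8) + 128

    y_flt = 0.257*r + 0.504*g + 0.098*b + 16
    u_flt = -0.148*r - 0.291*g + 0.439*b + 128
    v_flt = 0.439*r - 0.368*g - 0.071*b + 128

    print y_int, round(y_flt)    # 132, 132.0
    print u_int, round(u_flt)    # 81, 81.0
    print v_int, round(v_flt)    # 169, 169.0

The patch keeps the intermediate sums in unsigned variables, so negative chroma sums wrap around; because only the low byte survives the final store, the result still appears to come out the same as the signed arithmetic used in this sketch.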
* im[rgbInOffset3+2]; + unsigned V4 = 112 * im[rgbInOffset4] - 94 * im[rgbInOffset4+1] - 18 * im[rgbInOffset4+2]; + Y1 = ((Y1 + 128) >> 8) + 16; Y2 = ((Y2 + 128) >> 8) + 16; Y3 = ((Y3 + 128) >> 8) + 16; Y4 = ((Y4 + 128) >> 8) + 16; + U1 = ((U1 + 128) >> 8) + 128; + U2 = ((U2 + 128) >> 8) + 128; + U3 = ((U3 + 128) >> 8) + 128; + U4 = ((U4 + 128) >> 8) + 128; + + V1 = ((V1 + 128) >> 8) + 128; + V2 = ((V2 + 128) >> 8) + 128; + V3 = ((V3 + 128) >> 8) + 128; + V4 = ((V4 + 128) >> 8) + 128; + (*buffOut)[YOutOffset1] = Y1; (*buffOut)[YOutOffset2] = Y2; (*buffOut)[YOutOffset3] = Y3; (*buffOut)[YOutOffset4] = Y4; + (*buffOut)[VOutOffset] = (unsigned char)((V1+V2+V3+V4)/4.+0.5); + (*buffOut)[UOutOffset] = (unsigned char)((U1+U2+U3+U4)/4.+0.5); } } From 297501b00b5effb262973eb9ff6332516fa08d1c Mon Sep 17 00:00:00 2001 From: TimSC Date: Wed, 18 Dec 2013 14:15:22 +0000 Subject: [PATCH 203/256] Reorganise pixel format code --- pixfmt.cpp | 168 ++++++++++++++++++++++++++++++----------------------- 1 file changed, 95 insertions(+), 73 deletions(-) diff --git a/pixfmt.cpp b/pixfmt.cpp index 4ebcf15..19be62a 100644 --- a/pixfmt.cpp +++ b/pixfmt.cpp @@ -436,6 +436,7 @@ int ConvertRgb24ToI420orYV12(const unsigned char *im, unsigned dataLen, } else { + //Assume YV12 uPlaneOffset = width * height * 1.25; vPlaneOffset = width * height; } @@ -500,6 +501,54 @@ int ConvertRgb24ToI420orYV12(const unsigned char *im, unsigned dataLen, return 1; } +int ConvertYUYVtoRGB(const unsigned char *im, unsigned dataLen, + int width, int height, + unsigned char **buffOut, + unsigned *buffOutLen) +{ + // Convert buffer from YUYV to RGB. + // For the byte order, see: http://v4l2spec.bytesex.org/spec/r4339.htm + // For the color conversion, see: http://v4l2spec.bytesex.org/spec/x2123.htm + unsigned int outBuffLen = dataLen * 6 / 4; + if(*buffOutLen != 0 && *buffOutLen != outBuffLen) + throw std::runtime_error("Output buffer has incorrect length"); + *buffOutLen = outBuffLen; + char *rgb = (char*)*buffOut; + if(*buffOut == NULL) + { + rgb = new char[*buffOutLen]; + *buffOut = (unsigned char*)rgb; + } + + char *rgb_max = rgb + *buffOutLen; + const unsigned char *yuyv = im; + +#define CLAMP(c) ((c) <= 0 ? 0 : (c) >= 65025 ? 
255 : (c) >> 8) + while(rgb < rgb_max) + { + int u = yuyv[1] - 128; + int v = yuyv[3] - 128; + int uv = 100 * u + 208 * v; + u *= 516; + v *= 409; + + int y = 298 * (yuyv[0] - 16); + rgb[0] = CLAMP(y + v); + rgb[1] = CLAMP(y - uv); + rgb[2] = CLAMP(y + u); + + y = 298 * (yuyv[2] - 16); + rgb[3] = CLAMP(y + v); + rgb[4] = CLAMP(y - uv); + rgb[5] = CLAMP(y + u); + + rgb += 6; + yuyv += 4; + } +#undef CLAMP + return 1; +} + // ********************************************************************* int DecodeFrame(const unsigned char *data, unsigned dataLen, @@ -509,9 +558,10 @@ int DecodeFrame(const unsigned char *data, unsigned dataLen, unsigned char **buffOut, unsigned *buffOutLen) { + //Check if input format and output format match if(strcmp(inPxFmt, targetPxFmt) == 0) { - //Conversion not required, return a shallow copy + //Conversion not required, return a copy if (*buffOutLen != 0 && *buffOutLen != dataLen) { throw std::runtime_error("Output buffer has incorrect size"); @@ -525,6 +575,7 @@ int DecodeFrame(const unsigned char *data, unsigned dataLen, return 1; } + //MJPEG frame to RGB24 if(strcmp(inPxFmt,"MJPEG")==0 && strcmp(targetPxFmt, "RGB24")==0) { std::string jpegBin; @@ -556,51 +607,17 @@ int DecodeFrame(const unsigned char *data, unsigned dataLen, return 1; } + //YUYV to RGB24 if(strcmp(inPxFmt,"YUYV")==0 && strcmp(targetPxFmt, "RGB24")==0) { - // Convert buffer from YUYV to RGB. - // For the byte order, see: http://v4l2spec.bytesex.org/spec/r4339.htm - // For the color conversion, see: http://v4l2spec.bytesex.org/spec/x2123.htm - unsigned int outBuffLen = dataLen * 6 / 4; - if(*buffOutLen != 0 && *buffOutLen != outBuffLen) - throw std::runtime_error("Output buffer has incorrect length"); - *buffOutLen = outBuffLen; - char *rgb = (char*)*buffOut; - if(*buffOut == NULL) - { - rgb = new char[*buffOutLen]; - *buffOut = (unsigned char*)rgb; - } - - char *rgb_max = rgb + *buffOutLen; - const unsigned char *yuyv = data; - - #define CLAMP(c) ((c) <= 0 ? 0 : (c) >= 65025 ? 
255 : (c) >> 8) - while(rgb < rgb_max) - { - int u = yuyv[1] - 128; - int v = yuyv[3] - 128; - int uv = 100 * u + 208 * v; - u *= 516; - v *= 409; - - int y = 298 * (yuyv[0] - 16); - rgb[0] = CLAMP(y + v); - rgb[1] = CLAMP(y - uv); - rgb[2] = CLAMP(y + u); - - y = 298 * (yuyv[2] - 16); - rgb[3] = CLAMP(y + v); - rgb[4] = CLAMP(y - uv); - rgb[5] = CLAMP(y + u); - - rgb += 6; - yuyv += 4; - } - #undef CLAMP - return 1; + int ret = ConvertYUYVtoRGB(data, dataLen, + width, height, + buffOut, + buffOutLen); + return ret; } + //RGB24 to I420 or YV12 if(strcmp(inPxFmt,"RGB24")==0 && (strcmp(targetPxFmt, "I420")==0 || strcmp(targetPxFmt, "YV12")==0)) { @@ -610,6 +627,7 @@ int DecodeFrame(const unsigned char *data, unsigned dataLen, return ret; } + //RGB24 to YUYV or UYVY if(strcmp(inPxFmt,"RGB24")==0 && (strcmp(targetPxFmt, "YUYV")==0 || strcmp(targetPxFmt, "UYVY")==0) @@ -717,7 +735,8 @@ int DecodeFrame(const unsigned char *data, unsigned dataLen, return 0; } -// ********************************************************* + +// ************* Resize Code ******************************* int ResizeRgb24ImageNN(const unsigned char *data, unsigned dataLen, int widthIn, int heightIn, @@ -788,6 +807,38 @@ int CropToFitRgb24Image(const unsigned char *data, unsigned dataLen, //******************************************************************* +int ResizeFrame(const unsigned char *data, + unsigned dataLen, + const char *pxFmt, + int srcWidth, int srcHeight, + unsigned char **buffOut, + unsigned *buffOutLen, + int dstWidth, + int dstHeight) +{ + if(strcmp(pxFmt,"RGB24")==0 || strcmp(pxFmt,"BGR24")==0) + { + //Allocate new buffer if needed + int dstBuffSize = 3 * dstWidth * dstHeight; + if(*buffOutLen != 0 && *buffOutLen != dstBuffSize) + throw std::runtime_error("Output buffer has incorrect size"); + *buffOutLen = dstBuffSize; + if(*buffOut == NULL) + *buffOut = new unsigned char [*buffOutLen]; + + return ResizeRgb24ImageNN(data, dataLen, + srcWidth, srcHeight, + *buffOut, + *buffOutLen, + dstWidth, dstHeight, 0, 3); + } + //Not supported + return 0; +} + +// ****** Combined resize and convert ************************************************* + + int DecodeAndResizeFrame(const unsigned char *data, unsigned dataLen, const char *inPxFmt, @@ -883,33 +934,4 @@ int DecodeAndResizeFrame(const unsigned char *data, } return resizeRet; -} - -int ResizeFrame(const unsigned char *data, - unsigned dataLen, - const char *pxFmt, - int srcWidth, int srcHeight, - unsigned char **buffOut, - unsigned *buffOutLen, - int dstWidth, - int dstHeight) -{ - if(strcmp(pxFmt,"RGB24")==0 || strcmp(pxFmt,"BGR24")==0) - { - //Allocate new buffer if needed - int dstBuffSize = 3 * dstWidth * dstHeight; - if(*buffOutLen != 0 && *buffOutLen != dstBuffSize) - throw std::runtime_error("Output buffer has incorrect size"); - *buffOutLen = dstBuffSize; - if(*buffOut == NULL) - *buffOut = new unsigned char [*buffOutLen]; - - return ResizeRgb24ImageNN(data, dataLen, - srcWidth, srcHeight, - *buffOut, - *buffOutLen, - dstWidth, dstHeight, 0, 3); - } - //Not supported - return 0; -} +} \ No newline at end of file From 3f338a5f584d1d38de06ad4ed3653af9f32492fb Mon Sep 17 00:00:00 2001 From: TimSC Date: Wed, 18 Dec 2013 14:44:39 +0000 Subject: [PATCH 204/256] Colour I420 now working --- videooutfile.cpp | 3 ++- videooutfile.py | 6 +++++- 2 files changed, 7 insertions(+), 2 deletions(-) diff --git a/videooutfile.cpp b/videooutfile.cpp index 747b110..4bed30c 100644 --- a/videooutfile.cpp +++ b/videooutfile.cpp @@ -92,7 +92,7 @@ PyObject 
*Video_out_file_manager_open(Video_out_file_manager *self, PyObject *ar PyObject *Video_out_file_manager_Send_frame(Video_out_file_manager *self, PyObject *args) { - printf("Video_out_manager_Send_frame\n"); + printf("Video_out_file_manager_Send_frame\n"); //dev = '\\dev\\video0', img, pixel_format, width, height //Process arguments @@ -140,6 +140,7 @@ PyObject *Video_out_file_manager_Send_frame(Video_out_file_manager *self, PyObje } else { + std::cout << "devarg " << devarg << std::endl; PyErr_Format(PyExc_RuntimeError, "Device not found."); Py_RETURN_NONE; } diff --git a/videooutfile.py b/videooutfile.py index 8b2446f..8891914 100644 --- a/videooutfile.py +++ b/videooutfile.py @@ -14,8 +14,12 @@ realTimeFrames = 0 - outManager.open("test.wmv", "YV12", 640, 480) + outManager.open("test.wmv", 640, 480) + print "set_video_codec" + outManager.set_video_codec("test.wmv", "H264", 800000) + print "set_frame_rate" outManager.set_frame_rate("test.wmv", 25) + print "enable_real_time_frames", realTimeFrames outManager.enable_real_time_frame_rate("test.wmv", realTimeFrames) imgLen = w * h * 3 From f9f9afc59b39f384091543d1dad693404a36ac53 Mon Sep 17 00:00:00 2001 From: TimSC Date: Wed, 18 Dec 2013 15:00:16 +0000 Subject: [PATCH 205/256] Change default colour encoding and function params --- libvideolive.cpp | 2 +- mfvideooutfile.cpp | 5 +++++ videooutfile.cpp | 7 ++----- videooutfile.py | 11 ++++++----- 4 files changed, 14 insertions(+), 11 deletions(-) diff --git a/libvideolive.cpp b/libvideolive.cpp index a7e20de..426c58d 100644 --- a/libvideolive.cpp +++ b/libvideolive.cpp @@ -128,7 +128,7 @@ static PyTypeObject Video_out_manager_type = { static PyMethodDef Video_out_file_manager_methods[] = { {"open", (PyCFunction)Video_out_file_manager_open, METH_VARARGS, - "open(filename = 'out.wmv', pixel_format, width, height)\n\n" + "open(filename = 'out.wmv', width, height)\n\n" "Open video output."}, {"send_frame", (PyCFunction)Video_out_file_manager_Send_frame, METH_VARARGS, "send_frame(filename = 'out.wmv', img, pixel_format, width, height)\n\n" diff --git a/mfvideooutfile.cpp b/mfvideooutfile.cpp index 77de4d9..43f7b7c 100644 --- a/mfvideooutfile.cpp +++ b/mfvideooutfile.cpp @@ -498,7 +498,12 @@ void MfVideoOutFile::SetVideoCodec(const char *codec, UINT32 bitrateIn) throw std::runtime_error("Set video parameters before opening video file"); } if(codec!=NULL) + { this->videoCodec = codec; + + + + } if(bitrateIn > 0) this->bitRate = bitrateIn; } diff --git a/videooutfile.cpp b/videooutfile.cpp index 4bed30c..0c6971d 100644 --- a/videooutfile.cpp +++ b/videooutfile.cpp @@ -38,11 +38,10 @@ PyObject *Video_out_file_manager_open(Video_out_file_manager *self, PyObject *ar //Process arguments const char *devarg = NULL; - const char *pxFmtIn = NULL; int widthIn = 0; int heightIn = 0; - if(!PyArg_ParseTuple(args, "ssii", &devarg, &pxFmtIn, &widthIn, &heightIn)) + if(!PyArg_ParseTuple(args, "sii", &devarg, &widthIn, &heightIn)) { PyErr_Format(PyExc_RuntimeError, "Incorrect arguments to function."); Py_RETURN_NONE; @@ -71,7 +70,6 @@ PyObject *Video_out_file_manager_open(Video_out_file_manager *self, PyObject *ar try { threadArgs->SetOutputSize(widthIn, heightIn); - threadArgs->SetOutputPxFmt(pxFmtIn); } catch(std::exception &err) { @@ -92,7 +90,7 @@ PyObject *Video_out_file_manager_open(Video_out_file_manager *self, PyObject *ar PyObject *Video_out_file_manager_Send_frame(Video_out_file_manager *self, PyObject *args) { - printf("Video_out_file_manager_Send_frame\n"); + 
//printf("Video_out_file_manager_Send_frame\n"); //dev = '\\dev\\video0', img, pixel_format, width, height //Process arguments @@ -140,7 +138,6 @@ PyObject *Video_out_file_manager_Send_frame(Video_out_file_manager *self, PyObje } else { - std::cout << "devarg " << devarg << std::endl; PyErr_Format(PyExc_RuntimeError, "Device not found."); Py_RETURN_NONE; } diff --git a/videooutfile.py b/videooutfile.py index 8891914..48ad30e 100644 --- a/videooutfile.py +++ b/videooutfile.py @@ -11,16 +11,17 @@ print lena.shape w = lena.shape[1] h = lena.shape[0] + fina = "test.mp4" realTimeFrames = 0 - outManager.open("test.wmv", 640, 480) + outManager.open(fina, 640, 480) print "set_video_codec" - outManager.set_video_codec("test.wmv", "H264", 800000) + outManager.set_video_codec(fina, "WMV3", 800000) print "set_frame_rate" - outManager.set_frame_rate("test.wmv", 25) + outManager.set_frame_rate(fina, 25) print "enable_real_time_frames", realTimeFrames - outManager.enable_real_time_frame_rate("test.wmv", realTimeFrames) + outManager.enable_real_time_frame_rate(fina, realTimeFrames) imgLen = w * h * 3 #img = np.ones(shape=(imgLen,), dtype=np.uint8) * 0 @@ -37,7 +38,7 @@ # img[i] = 128 print "Frame", frNum - outManager.send_frame("test.wmv", str(lena.tostring()), "RGB24", w, h) + outManager.send_frame(fina, str(lena.tostring()), "RGB24", w, h) if realTimeFrames: time.sleep(frNum / 500.) From d9ec39ff3536395e3b248eecabbdbb2b161739d5 Mon Sep 17 00:00:00 2001 From: TimSC Date: Wed, 18 Dec 2013 15:32:22 +0000 Subject: [PATCH 206/256] Support different container formats --- mfvideooutfile.cpp | 40 +++++++++++++++++++++++++++++++++++++--- videooutfile.py | 4 ++-- 2 files changed, 39 insertions(+), 5 deletions(-) diff --git a/mfvideooutfile.cpp b/mfvideooutfile.cpp index 43f7b7c..1456563 100644 --- a/mfvideooutfile.cpp +++ b/mfvideooutfile.cpp @@ -124,10 +124,44 @@ void MfVideoOutFile::OpenFile() if(!this->variableFrameRateEnabled) MFFrameRateToAverageTimePerFrame(this->frameRateFps, 1, &this->rtDuration); - HRESULT hr = MFCreateSinkWriterFromURL(this->fina.c_str(), NULL, NULL, &pSinkWriter); - if (!SUCCEEDED(hr)) + IMFAttributes *containerAttributes = NULL; + HRESULT hr = MFCreateAttributes(&containerAttributes, 0); + + IMFByteStream *pIByteStream = NULL; + + if (SUCCEEDED(hr)) + { + hr = MFCreateFile(MF_ACCESSMODE_READWRITE, + MF_OPENMODE_DELETE_IF_EXIST, + MF_FILEFLAGS_NONE, + this->fina.c_str(), + &pIByteStream); + if (!SUCCEEDED(hr)) errMsg = "MFCreateFile failed"; + } + + if(containerAttributes!=NULL) + { + int len4 = this->fina.size() - 4; + if(len4 < 0) len4 = 0; + const wchar_t *ext4 = &this->fina.c_str()[len4]; + if(wcscmp(ext4, L".mp4")==0) + containerAttributes->SetGUID(MF_TRANSCODE_CONTAINERTYPE, MFTranscodeContainerType_MPEG4); + if(wcscmp(ext4, L".asf")==0) + containerAttributes->SetGUID(MF_TRANSCODE_CONTAINERTYPE, MFTranscodeContainerType_ASF); + if(wcscmp(ext4, L".wmv")==0) + containerAttributes->SetGUID(MF_TRANSCODE_CONTAINERTYPE, MFTranscodeContainerType_ASF); + if(wcscmp(ext4, L".mp3")==0) + containerAttributes->SetGUID(MF_TRANSCODE_CONTAINERTYPE, MFTranscodeContainerType_MP3); +#ifdef MFTranscodeContainerType_AVI + if(wcscmp(ext4, L".avi")==0) + containerAttributes->SetGUID(MF_TRANSCODE_CONTAINERTYPE, MFTranscodeContainerType_AVI); +#endif + } + + if (SUCCEEDED(hr)) { - errMsg = "MFCreateSinkWriterFromURL failed"; + hr = MFCreateSinkWriterFromURL(this->fina.c_str(), pIByteStream, containerAttributes, &pSinkWriter); + if (!SUCCEEDED(hr)) errMsg = "MFCreateSinkWriterFromURL failed"; } // Set the 
output media type. diff --git a/videooutfile.py b/videooutfile.py index 48ad30e..9ef70e8 100644 --- a/videooutfile.py +++ b/videooutfile.py @@ -11,13 +11,13 @@ print lena.shape w = lena.shape[1] h = lena.shape[0] - fina = "test.mp4" + fina = "test.wmv" realTimeFrames = 0 outManager.open(fina, 640, 480) print "set_video_codec" - outManager.set_video_codec(fina, "WMV3", 800000) + outManager.set_video_codec(fina, "H264", 800000) print "set_frame_rate" outManager.set_frame_rate(fina, 25) print "enable_real_time_frames", realTimeFrames From ee99a47035ee4865d1ac56a2782d643e6bbce3a5 Mon Sep 17 00:00:00 2001 From: TimSC Date: Wed, 18 Dec 2013 17:59:51 +0000 Subject: [PATCH 207/256] Catch decode exception and convert to python exception --- videoin.cpp | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/videoin.cpp b/videoin.cpp index 1cfaa8a..db73066 100644 --- a/videoin.cpp +++ b/videoin.cpp @@ -139,7 +139,7 @@ PyObject *Device_manager_Start(Device_manager *self, PyObject *args) PyObject *Device_manager_Get_frame(Device_manager *self, PyObject *args) { - + //std::cout << "Device_manager_Get_frame" << std::endl; //Process arguments const char *devarg = "/dev/video0"; if(PyTuple_Size(args) >= 1) @@ -159,8 +159,17 @@ PyObject *Device_manager_Get_frame(Device_manager *self, PyObject *args) class Base_Video_In *threadArgs = (*self->threadArgStore)[devarg]; unsigned char *buffOut = NULL; class FrameMetaData metaOut; + int ok = 0; + try + { + ok = threadArgs->GetFrame(&buffOut, &metaOut); + } + catch(std::exception &err) + { + PyErr_Format(PyExc_RuntimeError, err.what()); + Py_RETURN_NONE; + } - int ok = threadArgs->GetFrame(&buffOut, &metaOut); if(ok && buffOut != NULL) { //Format output to python From aaa70ff492fe010772e306d0fedf2687ee03c969 Mon Sep 17 00:00:00 2001 From: TimSC Date: Wed, 18 Dec 2013 18:04:13 +0000 Subject: [PATCH 208/256] Remember to initialise buffer len to zero --- mfvideoin.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mfvideoin.cpp b/mfvideoin.cpp index e68ba00..2345444 100644 --- a/mfvideoin.cpp +++ b/mfvideoin.cpp @@ -586,7 +586,7 @@ int MfVideoIn::GetFrame(unsigned char **buffOut, class FrameMetaData *metaOut) //Do conversion to rgb unsigned char *buffConv = NULL; - unsigned buffConvLen; + unsigned buffConvLen = 0; int ok = DecodeFrame(currentBuff, currentBuffLen, currentPixFmt.c_str(), this->widthBuff[0], this->heightBuff[0], From c0548354a3864b29e364f0c4222fd0bf1ff278f6 Mon Sep 17 00:00:00 2001 From: Tim Sheerman-Chase Date: Wed, 18 Dec 2013 18:54:12 +0000 Subject: [PATCH 209/256] Change types to be linux compatible --- base.h | 4 ++-- mfvideooutfile.cpp | 4 ++-- videooutfile.cpp | 2 +- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/base.h b/base.h index f2ee0ea..12920d5 100644 --- a/base.h +++ b/base.h @@ -75,8 +75,8 @@ class Base_Video_Out virtual int WaitForStop() {return 1;}; virtual void SetOutputSize(int width, int height) {}; virtual void SetOutputPxFmt(const char *fmt) {}; - virtual void SetFrameRate(UINT32 frameRateIn) {}; - virtual void SetVideoCodec(const char *codec, UINT32 bitrate) {}; + virtual void SetFrameRate(unsigned int frameRateIn) {}; + virtual void SetVideoCodec(const char *codec, unsigned int bitrate) {}; virtual void EnableRealTimeFrameRate(int varEnable) {}; void Run() {}; diff --git a/mfvideooutfile.cpp b/mfvideooutfile.cpp index 1456563..7b7e493 100644 --- a/mfvideooutfile.cpp +++ b/mfvideooutfile.cpp @@ -516,7 +516,7 @@ void MfVideoOutFile::SetOutputPxFmt(const char *fmt) 
this->pxFmt = fmt; } -void MfVideoOutFile::SetFrameRate(UINT32 frameRateIn) +void MfVideoOutFile::SetFrameRate(unsigned int frameRateIn) { if(this->pSinkWriter != NULL) { @@ -525,7 +525,7 @@ void MfVideoOutFile::SetFrameRate(UINT32 frameRateIn) this->frameRateFps = frameRateIn; } -void MfVideoOutFile::SetVideoCodec(const char *codec, UINT32 bitrateIn) +void MfVideoOutFile::SetVideoCodec(const char *codec, unsigned int bitrateIn) { if(this->pSinkWriter != NULL) { diff --git a/videooutfile.cpp b/videooutfile.cpp index 0c6971d..2254adf 100644 --- a/videooutfile.cpp +++ b/videooutfile.cpp @@ -49,11 +49,11 @@ PyObject *Video_out_file_manager_open(Video_out_file_manager *self, PyObject *ar //Create worker thread pthread_t thread; - MfVideoOutFile *threadArgs = NULL; #ifdef _POSIX //TODO #endif #ifdef _NT + MfVideoOutFile *threadArgs = NULL; try { threadArgs = new MfVideoOutFile(devarg); From 032098e31af7dc34a5bec440bff40440492bbbf0 Mon Sep 17 00:00:00 2001 From: Tim Sheerman-Chase Date: Wed, 18 Dec 2013 19:11:50 +0000 Subject: [PATCH 210/256] Check start of jpeg --- pixfmt.cpp | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/pixfmt.cpp b/pixfmt.cpp index 19be62a..878b4d7 100644 --- a/pixfmt.cpp +++ b/pixfmt.cpp @@ -229,6 +229,10 @@ int ReadJpegFile(unsigned char * inbuffer, /* This struct contains the JPEG decompression parameters and pointers to * working space (which is allocated as needed by the JPEG library). */ + + if(inbuffer[0] != 0xFF || inbuffer[1] != 0xD8) + return 0; + struct jpeg_decompress_struct cinfo; struct my_error_mgr jerr; *outBuffer = NULL; @@ -934,4 +938,4 @@ int DecodeAndResizeFrame(const unsigned char *data, } return resizeRet; -} \ No newline at end of file +} From 32db39b5af2c4269f02aa1457df0b720035c3020 Mon Sep 17 00:00:00 2001 From: TimSC Date: Wed, 18 Dec 2013 22:33:33 +0000 Subject: [PATCH 211/256] Disable unnecessary code --- videoin.h | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/videoin.h b/videoin.h index 63883fe..29b2bc1 100644 --- a/videoin.h +++ b/videoin.h @@ -7,12 +7,12 @@ #include #include -typedef struct { +/*typedef struct { PyObject_HEAD int fd; struct buffer *buffers; int buffer_count; -} Video_device; +} Video_device;*/ class Device_manager_cl{ public: From cc3efdfe408bd76db2221f1557f5fb061107e596 Mon Sep 17 00:00:00 2001 From: TimSC Date: Wed, 18 Dec 2013 22:45:18 +0000 Subject: [PATCH 212/256] Dropped frames were leaking memory --- mfvideoin.cpp | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/mfvideoin.cpp b/mfvideoin.cpp index 2345444..253693a 100644 --- a/mfvideoin.cpp +++ b/mfvideoin.cpp @@ -318,6 +318,9 @@ class SourceReaderCB : public IMFSourceReaderCallback while(this->frameBuff.size() > this->maxNumFrames) { //Drop an old frame if buffer is starting to overflow + char *frameToDrop = frameBuff[0]; + delete [] frameToDrop; + frameToDrop = NULL; frameBuff.erase(frameBuff.begin()); frameLenBuff.erase(frameLenBuff.begin()); hrStatusBuff.erase(hrStatusBuff.begin()); @@ -356,6 +359,7 @@ class SourceReaderCB : public IMFSourceReaderCallback dwStreamIndexBuff.push_back(dwStreamIndex); dwStreamFlagsBuff.push_back(dwStreamFlags); llTimestampBuff.push_back(llTimestamp); + std::cout << "Callback buff: " << frameBuff.size() << std::endl; this->CheckForBufferOverflow(); } @@ -447,7 +451,9 @@ class SourceReaderCB : public IMFSourceReaderCallback this->dwStreamFlagsBuff.erase(this->dwStreamFlagsBuff.begin()); this->llTimestampBuff.erase(this->llTimestampBuff.begin()); ret = 1; + std::cout << 
"Callback buff: " << frameBuff.size() << std::endl; } + LeaveCriticalSection(&lock); return ret; } @@ -623,6 +629,8 @@ int MfVideoIn::GetFrame(unsigned char **buffOut, class FrameMetaData *metaOut) this->dwStreamFlagsBuff.erase(this->dwStreamFlagsBuff.begin()); this->llTimestampBuff.erase(this->llTimestampBuff.begin()); + std::cout << "Vidin buff: " << this->frameBuff.size() << std::endl; + this->PopFrontMetaDataBuff(); LeaveCriticalSection(&lock); @@ -858,6 +866,10 @@ void MfVideoIn::ReadFramesInternal() //Ensure the buffer does not overflow while(this->frameBuff.size() >= this->maxBuffSize) { + std::cout << "Dropping frame from vidin buff" << std::endl; + char *frameToDrop = this->frameBuff[0]; + delete [] frameToDrop; + frameToDrop = NULL; this->frameBuff.erase(this->frameBuff.begin()); this->frameLenBuff.erase(this->frameLenBuff.begin()); this->hrStatusBuff.erase(this->hrStatusBuff.begin()); @@ -877,6 +889,7 @@ void MfVideoIn::ReadFramesInternal() this->dwStreamIndexBuff.push_back(dwStreamIndex); this->dwStreamFlagsBuff.push_back(dwStreamFlags); this->llTimestampBuff.push_back(llTimestamp); + std::cout << "Vidin buff: " << this->frameBuff.size() << std::endl; this->SetSampleMetaData(dwStreamIndex); } From 93404f75c0b5c8be3f95928ee17d8d6d83b39d54 Mon Sep 17 00:00:00 2001 From: TimSC Date: Wed, 18 Dec 2013 22:46:25 +0000 Subject: [PATCH 213/256] Remove debug messages --- mfvideoin.cpp | 6 ------ 1 file changed, 6 deletions(-) diff --git a/mfvideoin.cpp b/mfvideoin.cpp index 253693a..06e66c3 100644 --- a/mfvideoin.cpp +++ b/mfvideoin.cpp @@ -359,7 +359,6 @@ class SourceReaderCB : public IMFSourceReaderCallback dwStreamIndexBuff.push_back(dwStreamIndex); dwStreamFlagsBuff.push_back(dwStreamFlags); llTimestampBuff.push_back(llTimestamp); - std::cout << "Callback buff: " << frameBuff.size() << std::endl; this->CheckForBufferOverflow(); } @@ -451,7 +450,6 @@ class SourceReaderCB : public IMFSourceReaderCallback this->dwStreamFlagsBuff.erase(this->dwStreamFlagsBuff.begin()); this->llTimestampBuff.erase(this->llTimestampBuff.begin()); ret = 1; - std::cout << "Callback buff: " << frameBuff.size() << std::endl; } LeaveCriticalSection(&lock); @@ -629,8 +627,6 @@ int MfVideoIn::GetFrame(unsigned char **buffOut, class FrameMetaData *metaOut) this->dwStreamFlagsBuff.erase(this->dwStreamFlagsBuff.begin()); this->llTimestampBuff.erase(this->llTimestampBuff.begin()); - std::cout << "Vidin buff: " << this->frameBuff.size() << std::endl; - this->PopFrontMetaDataBuff(); LeaveCriticalSection(&lock); @@ -866,7 +862,6 @@ void MfVideoIn::ReadFramesInternal() //Ensure the buffer does not overflow while(this->frameBuff.size() >= this->maxBuffSize) { - std::cout << "Dropping frame from vidin buff" << std::endl; char *frameToDrop = this->frameBuff[0]; delete [] frameToDrop; frameToDrop = NULL; @@ -889,7 +884,6 @@ void MfVideoIn::ReadFramesInternal() this->dwStreamIndexBuff.push_back(dwStreamIndex); this->dwStreamFlagsBuff.push_back(dwStreamFlags); this->llTimestampBuff.push_back(llTimestamp); - std::cout << "Vidin buff: " << this->frameBuff.size() << std::endl; this->SetSampleMetaData(dwStreamIndex); } From f5897f498c76012477603a78da4c34597dd393ff Mon Sep 17 00:00:00 2001 From: TimSC Date: Wed, 18 Dec 2013 23:45:02 +0000 Subject: [PATCH 214/256] Exceptions should return null not none --- videooutfile.cpp | 37 +++++++++++++++++++------------------ 1 file changed, 19 insertions(+), 18 deletions(-) diff --git a/videooutfile.cpp b/videooutfile.cpp index 2254adf..07d92c2 100644 --- a/videooutfile.cpp +++ 
b/videooutfile.cpp @@ -43,8 +43,9 @@ PyObject *Video_out_file_manager_open(Video_out_file_manager *self, PyObject *ar if(!PyArg_ParseTuple(args, "sii", &devarg, &widthIn, &heightIn)) { - PyErr_Format(PyExc_RuntimeError, "Incorrect arguments to function."); - Py_RETURN_NONE; + std::cout << "Attempt to throw exception" << std::endl; + PyErr_SetString(PyExc_RuntimeError, "Incorrect arguments to function."); + return NULL; } //Create worker thread @@ -60,8 +61,8 @@ PyObject *Video_out_file_manager_open(Video_out_file_manager *self, PyObject *ar } catch(std::exception &err) { - PyErr_Format(PyExc_RuntimeError, err.what()); - Py_RETURN_NONE; + PyErr_SetString(PyExc_RuntimeError, err.what()); + return NULL; } #endif @@ -74,7 +75,7 @@ PyObject *Video_out_file_manager_open(Video_out_file_manager *self, PyObject *ar catch(std::exception &err) { PyErr_SetString(PyExc_RuntimeError, err.what()); - Py_RETURN_NONE; + return NULL; } #endif @@ -103,7 +104,7 @@ PyObject *Video_out_file_manager_Send_frame(Video_out_file_manager *self, PyObje if(PyObject_Length(args) < 5) { PyErr_Format(PyExc_RuntimeError, "Too few arguments."); - Py_RETURN_NONE; + return NULL; } PyObject *pydev = PyTuple_GetItem(args, 0); @@ -133,13 +134,13 @@ PyObject *Video_out_file_manager_Send_frame(Video_out_file_manager *self, PyObje catch (std::exception &err) { PyErr_Format(PyExc_RuntimeError, err.what()); - Py_RETURN_NONE; + return NULL; } } else { PyErr_Format(PyExc_RuntimeError, "Device not found."); - Py_RETURN_NONE; + return NULL; } Py_RETURN_NONE; @@ -167,7 +168,7 @@ PyObject *Video_out_file_manager_close(Video_out_file_manager *self, PyObject *a catch(std::exception &err) { PyErr_Format(PyExc_RuntimeError, err.what()); - Py_RETURN_NONE; + return NULL; } } @@ -184,7 +185,7 @@ PyObject *Video_out_file_manager_Set_Frame_Rate(Video_out_file_manager *self, Py if(PyObject_Length(args) < 2) { PyErr_Format(PyExc_RuntimeError, "Too few arguments."); - Py_RETURN_NONE; + return NULL; } PyObject *pydev = PyTuple_GetItem(args, 0); @@ -204,14 +205,14 @@ PyObject *Video_out_file_manager_Set_Frame_Rate(Video_out_file_manager *self, Py catch(std::exception &err) { PyErr_Format(PyExc_RuntimeError, err.what()); - Py_RETURN_NONE; + return NULL; } } else { PyErr_Format(PyExc_RuntimeError, "Device not found."); - Py_RETURN_NONE; + return NULL; } Py_RETURN_NONE; @@ -227,7 +228,7 @@ PyObject *Video_out_file_manager_Set_Video_Codec(Video_out_file_manager *self, P if(PyObject_Length(args) < 2) { PyErr_Format(PyExc_RuntimeError, "Too few arguments."); - Py_RETURN_NONE; + return NULL; } PyObject *pydev = PyTuple_GetItem(args, 0); @@ -256,14 +257,14 @@ PyObject *Video_out_file_manager_Set_Video_Codec(Video_out_file_manager *self, P catch(std::exception &err) { PyErr_Format(PyExc_RuntimeError, err.what()); - Py_RETURN_NONE; + return NULL; } } else { PyErr_Format(PyExc_RuntimeError, "Device not found."); - Py_RETURN_NONE; + return NULL; } Py_RETURN_NONE; @@ -278,7 +279,7 @@ PyObject *Video_out_file_manager_Enable_Real_Time_Frame_Rate(Video_out_file_mana if(PyObject_Length(args) < 2) { PyErr_Format(PyExc_RuntimeError, "Too few arguments."); - Py_RETURN_NONE; + return NULL; } PyObject *pydev = PyTuple_GetItem(args, 0); @@ -298,14 +299,14 @@ PyObject *Video_out_file_manager_Enable_Real_Time_Frame_Rate(Video_out_file_mana catch(std::exception &err) { PyErr_Format(PyExc_RuntimeError, err.what()); - Py_RETURN_NONE; + return NULL; } } else { PyErr_Format(PyExc_RuntimeError, "Device not found."); - Py_RETURN_NONE; + return NULL; } Py_RETURN_NONE; From 
203d61e5863bdb54e46cc788072be46c7e2194f1 Mon Sep 17 00:00:00 2001 From: TimSC Date: Wed, 18 Dec 2013 23:48:14 +0000 Subject: [PATCH 215/256] Exceptions should return null not none --- videoin.cpp | 17 +++++++++-------- videoout.cpp | 8 ++++---- 2 files changed, 13 insertions(+), 12 deletions(-) diff --git a/videoin.cpp b/videoin.cpp index db73066..6135e46 100644 --- a/videoin.cpp +++ b/videoin.cpp @@ -47,7 +47,7 @@ PyObject *Device_manager_open(Device_manager *self, PyObject *args) if(it!=self->threadArgStore->end()) { PyErr_Format(PyExc_RuntimeError, "Device already opened."); - Py_RETURN_NONE; + return NULL; } pthread_t thread; @@ -88,7 +88,8 @@ PyObject *Device_manager_set_format(Device_manager *self, PyObject *args) if(!PyArg_ParseTuple(args, "sii|s", &devarg, &size_x, &size_y, &fmt)) { - Py_RETURN_NONE; + PyErr_BadArgument(); + return NULL; } //Check this device is valid @@ -96,7 +97,7 @@ PyObject *Device_manager_set_format(Device_manager *self, PyObject *args) if(it==self->threadArgStore->end()) { PyErr_Format(PyExc_RuntimeError, "Device already not ready."); - Py_RETURN_NONE; + return NULL; } class Base_Video_In *threadArgs = (*self->threadArgStore)[devarg]; @@ -128,7 +129,7 @@ PyObject *Device_manager_Start(Device_manager *self, PyObject *args) if(it==self->threadArgStore->end()) { PyErr_Format(PyExc_RuntimeError, "Device already not ready."); - Py_RETURN_NONE; + return NULL; } class Base_Video_In *threadArgs = (*self->threadArgStore)[devarg]; @@ -153,7 +154,7 @@ PyObject *Device_manager_Get_frame(Device_manager *self, PyObject *args) if(it==self->threadArgStore->end()) { PyErr_Format(PyExc_RuntimeError, "Device already not ready."); - Py_RETURN_NONE; + return NULL; } class Base_Video_In *threadArgs = (*self->threadArgStore)[devarg]; @@ -167,7 +168,7 @@ PyObject *Device_manager_Get_frame(Device_manager *self, PyObject *args) catch(std::exception &err) { PyErr_Format(PyExc_RuntimeError, err.what()); - Py_RETURN_NONE; + return NULL; } if(ok && buffOut != NULL) @@ -207,7 +208,7 @@ PyObject *Device_manager_stop(Device_manager *self, PyObject *args) if(it==self->threadArgStore->end()) { PyErr_Format(PyExc_RuntimeError, "Device already not ready."); - Py_RETURN_NONE; + return NULL; } class Base_Video_In *threadArgs = (*self->threadArgStore)[devarg]; @@ -231,7 +232,7 @@ PyObject *Device_manager_close(Device_manager *self, PyObject *args) if(it==self->threadArgStore->end()) { PyErr_Format(PyExc_RuntimeError, "Device already not ready."); - Py_RETURN_NONE; + return NULL; } class Base_Video_In *threadArgs = (*self->threadArgStore)[devarg]; diff --git a/videoout.cpp b/videoout.cpp index 96f84e1..827ca10 100644 --- a/videoout.cpp +++ b/videoout.cpp @@ -46,7 +46,7 @@ PyObject *Video_out_manager_open(Video_out_manager *self, PyObject *args) { std::cout << "err" << std::endl; PyErr_Format(PyExc_RuntimeError, "Incorrect arguments to function."); - Py_RETURN_NONE; + return NULL; } //Create worker thread @@ -87,7 +87,7 @@ PyObject *Video_out_manager_Send_frame(Video_out_manager *self, PyObject *args) if(PyObject_Length(args) < 5) { PyErr_Format(PyExc_RuntimeError, "Too few arguments."); - Py_RETURN_NONE; + return NULL; } PyObject *pydev = PyTuple_GetItem(args, 0); @@ -117,13 +117,13 @@ PyObject *Video_out_manager_Send_frame(Video_out_manager *self, PyObject *args) catch(std::exception &err) { PyErr_Format(PyExc_RuntimeError, err.what()); - Py_RETURN_NONE; + return NULL; } } else { PyErr_Format(PyExc_RuntimeError, "Device not found."); - Py_RETURN_NONE; + return NULL; } Py_RETURN_NONE; From 
29bce84341d72f8148e956a514e82eb04334408b Mon Sep 17 00:00:00 2001 From: TimSC Date: Thu, 19 Dec 2013 00:07:46 +0000 Subject: [PATCH 216/256] Set default frame rate for video out --- mfvideooutfile.cpp | 7 +++++-- videoout.cpp | 1 - 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/mfvideooutfile.cpp b/mfvideooutfile.cpp index 7b7e493..c8521ad 100644 --- a/mfvideooutfile.cpp +++ b/mfvideooutfile.cpp @@ -96,7 +96,7 @@ MfVideoOutFile::MfVideoOutFile(const char *fiName) : Base_Video_Out() this->outputHeight = 480; this->bitRate = 800000; this->fina = CStringToWString(fiName); - this->frameRateFps = 0; + this->frameRateFps = 25; this->prevFrameDuration = 0; this->variableFrameRateEnabled = 0; SetTimeToZero(this->startVideoTime); @@ -136,7 +136,10 @@ void MfVideoOutFile::OpenFile() MF_FILEFLAGS_NONE, this->fina.c_str(), &pIByteStream); - if (!SUCCEEDED(hr)) errMsg = "MFCreateFile failed"; + if (!SUCCEEDED(hr)) + { + errMsg = "MFCreateFile failed"; + } } if(containerAttributes!=NULL) diff --git a/videoout.cpp b/videoout.cpp index 827ca10..73cc635 100644 --- a/videoout.cpp +++ b/videoout.cpp @@ -44,7 +44,6 @@ PyObject *Video_out_manager_open(Video_out_manager *self, PyObject *args) if(!PyArg_ParseTuple(args, "ssii", &devarg, &pxFmtIn, &widthIn, &heightIn)) { - std::cout << "err" << std::endl; PyErr_Format(PyExc_RuntimeError, "Incorrect arguments to function."); return NULL; } From 66404fe470afddfdfe9a4e41aa6a940283be53e0 Mon Sep 17 00:00:00 2001 From: Tim Sheerman-Chase Date: Thu, 19 Dec 2013 10:30:51 +0000 Subject: [PATCH 217/256] Change exceptions to use PyErr_SetString --- libvideolive.cpp | 6 ++---- videoin.cpp | 14 +++++++------- videoout.cpp | 8 ++++---- videooutfile.cpp | 26 +++++++++++++------------- 4 files changed, 26 insertions(+), 28 deletions(-) diff --git a/libvideolive.cpp b/libvideolive.cpp index 426c58d..f865939 100644 --- a/libvideolive.cpp +++ b/libvideolive.cpp @@ -30,8 +30,7 @@ PyObject *InsertHuffmanTable(PyObject *self, PyObject *args) if(PyTuple_Size(args) < 1) { - PyErr_BadArgument(); - PyErr_Format(PyExc_TypeError, "Function requires 1 argument"); + PyErr_SetString(PyExc_TypeError, "Function requires 1 argument"); Py_RETURN_NONE; } @@ -39,8 +38,7 @@ PyObject *InsertHuffmanTable(PyObject *self, PyObject *args) if(!PyString_Check(inBuffer)) { - PyErr_BadArgument(); - PyErr_Format(PyExc_TypeError, "Argument 1 must be a string."); + PyErr_SetString(PyExc_TypeError, "Argument 1 must be a string."); //PyObject* type = PyObject_Type(inBuffer); //PyObject_Print(type, stdout, Py_PRINT_RAW); //Py_CLEAR(type); diff --git a/videoin.cpp b/videoin.cpp index 6135e46..54c5ee6 100644 --- a/videoin.cpp +++ b/videoin.cpp @@ -46,7 +46,7 @@ PyObject *Device_manager_open(Device_manager *self, PyObject *args) std::map::iterator it = self->threadArgStore->find(devarg); if(it!=self->threadArgStore->end()) { - PyErr_Format(PyExc_RuntimeError, "Device already opened."); + PyErr_SetString(PyExc_RuntimeError, "Device already opened."); return NULL; } @@ -96,7 +96,7 @@ PyObject *Device_manager_set_format(Device_manager *self, PyObject *args) std::map::iterator it = self->threadArgStore->find(devarg); if(it==self->threadArgStore->end()) { - PyErr_Format(PyExc_RuntimeError, "Device already not ready."); + PyErr_SetString(PyExc_RuntimeError, "Device already not ready."); return NULL; } @@ -128,7 +128,7 @@ PyObject *Device_manager_Start(Device_manager *self, PyObject *args) std::map::iterator it = self->threadArgStore->find(devarg); if(it==self->threadArgStore->end()) { - 
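The exception-handling changes in this stretch of the series move the wrappers from setting a Python error and then returning Py_None to the conventional PyErr_SetString(...) followed by return NULL, which is what lets the error propagate cleanly; previously the error was set but a value was still returned, leaving the exception pending rather than raised at the call site. From Python the effect is that a bad call now raises RuntimeError. An illustrative sketch, not taken from the patches, assuming the same outManager object and frame variables as the test scripts:

    try:
        outManager.send_frame("missing.wmv", str(lena.tostring()), "RGB24", w, h)
    except RuntimeError, err:
        # Messages such as "Device not found." set via PyErr_SetString in the
        # C wrappers now surface here as the exception text.
        print "send_frame failed:", err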
PyErr_Format(PyExc_RuntimeError, "Device already not ready."); + PyErr_SetString(PyExc_RuntimeError, "Device already not ready."); return NULL; } @@ -153,7 +153,7 @@ PyObject *Device_manager_Get_frame(Device_manager *self, PyObject *args) std::map::iterator it = self->threadArgStore->find(devarg); if(it==self->threadArgStore->end()) { - PyErr_Format(PyExc_RuntimeError, "Device already not ready."); + PyErr_SetString(PyExc_RuntimeError, "Device already not ready."); return NULL; } @@ -167,7 +167,7 @@ PyObject *Device_manager_Get_frame(Device_manager *self, PyObject *args) } catch(std::exception &err) { - PyErr_Format(PyExc_RuntimeError, err.what()); + PyErr_SetString(PyExc_RuntimeError, err.what()); return NULL; } @@ -207,7 +207,7 @@ PyObject *Device_manager_stop(Device_manager *self, PyObject *args) std::map::iterator it = self->threadArgStore->find(devarg); if(it==self->threadArgStore->end()) { - PyErr_Format(PyExc_RuntimeError, "Device already not ready."); + PyErr_SetString(PyExc_RuntimeError, "Device already not ready."); return NULL; } @@ -231,7 +231,7 @@ PyObject *Device_manager_close(Device_manager *self, PyObject *args) std::map::iterator it = self->threadArgStore->find(devarg); if(it==self->threadArgStore->end()) { - PyErr_Format(PyExc_RuntimeError, "Device already not ready."); + PyErr_SetString(PyExc_RuntimeError, "Device already not ready."); return NULL; } diff --git a/videoout.cpp b/videoout.cpp index 73cc635..3994543 100644 --- a/videoout.cpp +++ b/videoout.cpp @@ -44,7 +44,7 @@ PyObject *Video_out_manager_open(Video_out_manager *self, PyObject *args) if(!PyArg_ParseTuple(args, "ssii", &devarg, &pxFmtIn, &widthIn, &heightIn)) { - PyErr_Format(PyExc_RuntimeError, "Incorrect arguments to function."); + PyErr_SetString(PyExc_RuntimeError, "Incorrect arguments to function."); return NULL; } @@ -85,7 +85,7 @@ PyObject *Video_out_manager_Send_frame(Video_out_manager *self, PyObject *args) if(PyObject_Length(args) < 5) { - PyErr_Format(PyExc_RuntimeError, "Too few arguments."); + PyErr_SetString(PyExc_RuntimeError, "Too few arguments."); return NULL; } @@ -115,13 +115,13 @@ PyObject *Video_out_manager_Send_frame(Video_out_manager *self, PyObject *args) } catch(std::exception &err) { - PyErr_Format(PyExc_RuntimeError, err.what()); + PyErr_SetString(PyExc_RuntimeError, err.what()); return NULL; } } else { - PyErr_Format(PyExc_RuntimeError, "Device not found."); + PyErr_SetString(PyExc_RuntimeError, "Device not found."); return NULL; } diff --git a/videooutfile.cpp b/videooutfile.cpp index 07d92c2..fbaa0d4 100644 --- a/videooutfile.cpp +++ b/videooutfile.cpp @@ -103,7 +103,7 @@ PyObject *Video_out_file_manager_Send_frame(Video_out_file_manager *self, PyObje if(PyObject_Length(args) < 5) { - PyErr_Format(PyExc_RuntimeError, "Too few arguments."); + PyErr_SetString(PyExc_RuntimeError, "Too few arguments."); return NULL; } @@ -133,13 +133,13 @@ PyObject *Video_out_file_manager_Send_frame(Video_out_file_manager *self, PyObje } catch (std::exception &err) { - PyErr_Format(PyExc_RuntimeError, err.what()); + PyErr_SetString(PyExc_RuntimeError, err.what()); return NULL; } } else { - PyErr_Format(PyExc_RuntimeError, "Device not found."); + PyErr_SetString(PyExc_RuntimeError, "Device not found."); return NULL; } @@ -167,7 +167,7 @@ PyObject *Video_out_file_manager_close(Video_out_file_manager *self, PyObject *a } catch(std::exception &err) { - PyErr_Format(PyExc_RuntimeError, err.what()); + PyErr_SetString(PyExc_RuntimeError, err.what()); return NULL; } @@ -184,7 +184,7 @@ PyObject 
*Video_out_file_manager_Set_Frame_Rate(Video_out_file_manager *self, Py if(PyObject_Length(args) < 2) { - PyErr_Format(PyExc_RuntimeError, "Too few arguments."); + PyErr_SetString(PyExc_RuntimeError, "Too few arguments."); return NULL; } @@ -204,14 +204,14 @@ PyObject *Video_out_file_manager_Set_Frame_Rate(Video_out_file_manager *self, Py } catch(std::exception &err) { - PyErr_Format(PyExc_RuntimeError, err.what()); + PyErr_SetString(PyExc_RuntimeError, err.what()); return NULL; } } else { - PyErr_Format(PyExc_RuntimeError, "Device not found."); + PyErr_SetString(PyExc_RuntimeError, "Device not found."); return NULL; } @@ -227,7 +227,7 @@ PyObject *Video_out_file_manager_Set_Video_Codec(Video_out_file_manager *self, P if(PyObject_Length(args) < 2) { - PyErr_Format(PyExc_RuntimeError, "Too few arguments."); + PyErr_SetString(PyExc_RuntimeError, "Too few arguments."); return NULL; } @@ -256,14 +256,14 @@ PyObject *Video_out_file_manager_Set_Video_Codec(Video_out_file_manager *self, P } catch(std::exception &err) { - PyErr_Format(PyExc_RuntimeError, err.what()); + PyErr_SetString(PyExc_RuntimeError, err.what()); return NULL; } } else { - PyErr_Format(PyExc_RuntimeError, "Device not found."); + PyErr_SetString(PyExc_RuntimeError, "Device not found."); return NULL; } @@ -278,7 +278,7 @@ PyObject *Video_out_file_manager_Enable_Real_Time_Frame_Rate(Video_out_file_mana if(PyObject_Length(args) < 2) { - PyErr_Format(PyExc_RuntimeError, "Too few arguments."); + PyErr_SetString(PyExc_RuntimeError, "Too few arguments."); return NULL; } @@ -298,14 +298,14 @@ PyObject *Video_out_file_manager_Enable_Real_Time_Frame_Rate(Video_out_file_mana } catch(std::exception &err) { - PyErr_Format(PyExc_RuntimeError, err.what()); + PyErr_SetString(PyExc_RuntimeError, err.what()); return NULL; } } else { - PyErr_Format(PyExc_RuntimeError, "Device not found."); + PyErr_SetString(PyExc_RuntimeError, "Device not found."); return NULL; } From 147c8a7bce775b254c5a33618d84172d9e087d6a Mon Sep 17 00:00:00 2001 From: TimSC Date: Thu, 19 Dec 2013 11:19:30 +0000 Subject: [PATCH 218/256] Change from self timing frames to use arguments for function --- base.h | 9 +++++-- libvideolive.cpp | 3 --- mfvideooutfile.cpp | 42 +++++++++++--------------------- mfvideooutfile.h | 10 ++++---- namedpipeout.cpp | 4 +++- namedpipeout.h | 4 +++- v4l2out.h | 4 +++- videooutfile.cpp | 60 +++++++++++++--------------------------------- videooutfile.h | 1 - videooutfile.py | 2 -- 10 files changed, 51 insertions(+), 88 deletions(-) diff --git a/base.h b/base.h index 12920d5..28929f4 100644 --- a/base.h +++ b/base.h @@ -70,14 +70,19 @@ class Base_Video_Out Base_Video_Out() {}; virtual ~Base_Video_Out() {}; - virtual void SendFrame(const char *imgIn, unsigned imgLen, const char *pxFmt, int width, int height) {}; + virtual void SendFrame(const char *imgIn, + unsigned imgLen, + const char *pxFmt, + int width, + int height, + unsigned long tv_sec = 0, + unsigned long tv_usec = 0) {}; virtual void Stop() {}; virtual int WaitForStop() {return 1;}; virtual void SetOutputSize(int width, int height) {}; virtual void SetOutputPxFmt(const char *fmt) {}; virtual void SetFrameRate(unsigned int frameRateIn) {}; virtual void SetVideoCodec(const char *codec, unsigned int bitrate) {}; - virtual void EnableRealTimeFrameRate(int varEnable) {}; void Run() {}; }; diff --git a/libvideolive.cpp b/libvideolive.cpp index f865939..f1d39e5 100644 --- a/libvideolive.cpp +++ b/libvideolive.cpp @@ -140,9 +140,6 @@ static PyMethodDef Video_out_file_manager_methods[] = { 
{"set_video_codec", (PyCFunction)Video_out_file_manager_Set_Video_Codec, METH_VARARGS, "set_video_codec(filename = 'out.wmv', codec = 'H264', bitrate)\n\n" "Set output video codec."}, - {"enable_real_time_frame_rate", (PyCFunction)Video_out_file_manager_Enable_Real_Time_Frame_Rate, METH_VARARGS, - "enable_real_time_frame_rate(filename = 'out.wmv', enable)\n\n" - "Set real time frame encoding."}, {NULL} }; diff --git a/mfvideooutfile.cpp b/mfvideooutfile.cpp index c8521ad..d9f74a4 100644 --- a/mfvideooutfile.cpp +++ b/mfvideooutfile.cpp @@ -98,7 +98,6 @@ MfVideoOutFile::MfVideoOutFile(const char *fiName) : Base_Video_Out() this->fina = CStringToWString(fiName); this->frameRateFps = 25; this->prevFrameDuration = 0; - this->variableFrameRateEnabled = 0; SetTimeToZero(this->startVideoTime); } @@ -121,7 +120,7 @@ void MfVideoOutFile::OpenFile() IMFMediaType *pMediaTypeIn = NULL; this->rtDuration = 1; std::string errMsg; - if(!this->variableFrameRateEnabled) + if(this->frameRateFps > 0) MFFrameRateToAverageTimePerFrame(this->frameRateFps, 1, &this->rtDuration); IMFAttributes *containerAttributes = NULL; @@ -319,7 +318,13 @@ void MfVideoOutFile::CloseFile() SafeRelease(&pSinkWriter); } -void MfVideoOutFile::SendFrame(const char *imgIn, unsigned imgLen, const char *pxFmt, int width, int height) +void MfVideoOutFile::SendFrame(const char *imgIn, + unsigned imgLen, + const char *pxFmt, + int width, + int height, + unsigned long tv_sec, + unsigned long tv_usec) { if(this->pSinkWriter == NULL) this->OpenFile(); @@ -330,23 +335,13 @@ void MfVideoOutFile::SendFrame(const char *imgIn, unsigned imgLen, const char *p this->startVideoTime = timeNow; } - //Time since video start - unsigned long elapseSec = 0; - unsigned long elapseUSec = 0; - if(!this->variableFrameRateEnabled) + if(tv_sec == 0 && tv_usec == 0) { - //Fixed frame rate - elapseSec = (unsigned long)(this->rtStart / 1e7); - elapseUSec = (unsigned long)((this->rtStart - elapseSec * 1e7)/10. + 0.5); + //Using fixed frame rate and generate time stamps + tv_sec = (unsigned long)(this->rtStart / 1e7); + tv_usec = (unsigned long)((this->rtStart - tv_sec * 1e7)/10. 
+ 0.5); this->rtStart += this->rtDuration; } - else - { - //Real time frames - double elapse = SubtractTimes(timeNow, this->startVideoTime); - elapseSec = (unsigned long)elapse; - elapseUSec = (unsigned long)(((elapse - (double)elapseSec) / (double)1e6) + 0.5); - } //Add frame to output buffer class FrameMetaData tmp; @@ -356,8 +351,8 @@ void MfVideoOutFile::SendFrame(const char *imgIn, unsigned imgLen, const char *p meta.width = width; meta.height = height; meta.buffLen = imgLen; - meta.tv_sec = elapseSec; - meta.tv_usec = elapseUSec; + meta.tv_sec = tv_sec; + meta.tv_usec = tv_usec; std::string img(imgIn, imgLen); this->outBuffer.push_back(img); @@ -545,15 +540,6 @@ void MfVideoOutFile::SetVideoCodec(const char *codec, unsigned int bitrateIn) this->bitRate = bitrateIn; } -void MfVideoOutFile::EnableRealTimeFrameRate(int varEnable) -{ - if(this->pSinkWriter != NULL) - { - throw std::runtime_error("Set video parameters before opening video file"); - } - this->variableFrameRateEnabled = varEnable; -} - void MfVideoOutFile::Run() { diff --git a/mfvideooutfile.h b/mfvideooutfile.h index b7cf630..3536f94 100644 --- a/mfvideooutfile.h +++ b/mfvideooutfile.h @@ -17,15 +17,16 @@ class MfVideoOutFile : public Base_Video_Out void OpenFile(); void CloseFile(); - void SendFrame(const char *imgIn, unsigned imgLen, const char *pxFmt, int width, int height); + void SendFrame(const char *imgIn, unsigned imgLen, const char *pxFmt, int width, int height, + unsigned long tv_sec = 0, + unsigned long tv_usec = 0); void Stop(); int WaitForStop(); virtual void SetOutputSize(int width, int height); virtual void SetOutputPxFmt(const char *fmt); - virtual void SetFrameRate(UINT32 frameRateIn); - virtual void SetVideoCodec(const char *codec, UINT32 bitrate); - virtual void EnableRealTimeFrameRate(int varEnable); + virtual void SetFrameRate(unsigned int frameRateIn); + virtual void SetVideoCodec(const char *codec, unsigned int bitrate); void MfVideoOutFile::CopyFromBufferToOutFile(int lastFrame = 0); void Run(); @@ -38,7 +39,6 @@ class MfVideoOutFile : public Base_Video_Out std::string pxFmt; std::string videoCodec; std::wstring fina; - int variableFrameRateEnabled; int outputWidth, outputHeight; UINT32 bitRate, frameRateFps; diff --git a/namedpipeout.cpp b/namedpipeout.cpp index d0cc547..78cc03c 100644 --- a/namedpipeout.cpp +++ b/namedpipeout.cpp @@ -295,7 +295,9 @@ NamedPipeOut::~NamedPipeOut() CoUninitialize(); } -void NamedPipeOut::SendFrame(const char *imgIn, unsigned imgLen, const char *pxFmt, int width, int height) +void NamedPipeOut::SendFrame(const char *imgIn, unsigned imgLen, const char *pxFmt, int width, int height, + unsigned long tv_sec, + unsigned long tv_usec) { cout << "NamedPipeOut::SendFrame" << endl; diff --git a/namedpipeout.h b/namedpipeout.h index 665112b..a872a5c 100644 --- a/namedpipeout.h +++ b/namedpipeout.h @@ -13,7 +13,9 @@ class NamedPipeOut : public Base_Video_Out NamedPipeOut(const char *devName); virtual ~NamedPipeOut(); - void SendFrame(const char *imgIn, unsigned imgLen, const char *pxFmt, int width, int height); + void SendFrame(const char *imgIn, unsigned imgLen, const char *pxFmt, int width, int height, + unsigned long tv_sec = 0, + unsigned long tv_usec = 0); void Stop(); int WaitForStop(); diff --git a/v4l2out.h b/v4l2out.h index 7003e44..268fb4b 100644 --- a/v4l2out.h +++ b/v4l2out.h @@ -32,7 +32,9 @@ class Video_out : public Base_Video_Out public: void Run(); - void SendFrame(const char *imgIn, unsigned imgLen, const char *pxFmt, int width, int height); + void 
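When SendFrame is given no timestamp (tv_sec and tv_usec both zero), MfVideoOutFile falls back to the fixed frame rate and synthesizes one from rtStart. The units are easy to misread: rtStart and rtDuration are Media Foundation durations in 100-nanosecond ticks (rtDuration comes from MFFrameRateToAverageTimePerFrame), which is where the 1e7 and the divide-by-10 come from. A minimal sketch of that arithmetic in Python, with illustrative values only:

    frameRateFps = 25
    rtDuration = 10000000 / frameRateFps       # 400000 ticks of 100 ns = 40 ms per frame
    rtStart = 3 * rtDuration                   # tick count at the fourth frame
    tv_sec = int(rtStart / 1e7)                # whole seconds
    tv_usec = int((rtStart - tv_sec * 1e7) / 10.0 + 0.5)  # leftover ticks to microseconds
    print tv_sec, tv_usec                      # 0 120000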
SendFrame(const char *imgIn, unsigned imgLen, const char *pxFmt, int width, int height, + unsigned long tv_sec = 0, + unsigned long tv_usec = 0); void Stop(); int WaitForStop(); diff --git a/videooutfile.cpp b/videooutfile.cpp index fbaa0d4..0683469 100644 --- a/videooutfile.cpp +++ b/videooutfile.cpp @@ -92,7 +92,7 @@ PyObject *Video_out_file_manager_open(Video_out_file_manager *self, PyObject *ar PyObject *Video_out_file_manager_Send_frame(Video_out_file_manager *self, PyObject *args) { //printf("Video_out_file_manager_Send_frame\n"); - //dev = '\\dev\\video0', img, pixel_format, width, height + //dev = '\\dev\\video0', img, pixel_format, width, height, time_sec, time_usec //Process arguments const char *devarg = NULL; @@ -100,6 +100,8 @@ PyObject *Video_out_file_manager_Send_frame(Video_out_file_manager *self, PyObje const char *pxFmtIn = NULL; int widthIn = 0; int heightIn = 0; + unsigned long time_sec = 0; + unsigned long time_usec = 0; if(PyObject_Length(args) < 5) { @@ -123,13 +125,25 @@ PyObject *Video_out_file_manager_Send_frame(Video_out_file_manager *self, PyObje PyObject *pyHeight = PyTuple_GetItem(args, 4); heightIn = PyInt_AsLong(pyHeight); + if(PyObject_Length(args) > 5) + { + PyObject *pyTimeSec = PyTuple_GetItem(args, 5); + time_sec = PyInt_AsLong(pyTimeSec); + } + + if(PyObject_Length(args) > 6) + { + PyObject *pyTimeUSec = PyTuple_GetItem(args, 6); + time_usec = PyInt_AsLong(pyTimeUSec); + } + std::map::iterator it = self->threads->find(devarg); if(it != self->threads->end()) { try { - it->second->SendFrame(imgIn, imgLen, pxFmtIn, widthIn, heightIn); + it->second->SendFrame(imgIn, imgLen, pxFmtIn, widthIn, heightIn, time_sec, time_usec); } catch (std::exception &err) { @@ -269,45 +283,3 @@ PyObject *Video_out_file_manager_Set_Video_Codec(Video_out_file_manager *self, P Py_RETURN_NONE; } - -PyObject *Video_out_file_manager_Enable_Real_Time_Frame_Rate(Video_out_file_manager *self, PyObject *args) -{ - //Process arguments - const char *devarg = NULL; - int realTimeFrameRate = 0; - - if(PyObject_Length(args) < 2) - { - PyErr_SetString(PyExc_RuntimeError, "Too few arguments."); - return NULL; - } - - PyObject *pydev = PyTuple_GetItem(args, 0); - devarg = PyString_AsString(pydev); - - PyObject *pyRealTimeFrameRate = PyTuple_GetItem(args, 1); - realTimeFrameRate = PyInt_AsLong(pyRealTimeFrameRate); - - std::map::iterator it = self->threads->find(devarg); - - if(it != self->threads->end()) - { - try - { - it->second->EnableRealTimeFrameRate(realTimeFrameRate); - } - catch(std::exception &err) - { - PyErr_SetString(PyExc_RuntimeError, err.what()); - return NULL; - } - - } - else - { - PyErr_SetString(PyExc_RuntimeError, "Device not found."); - return NULL; - } - - Py_RETURN_NONE; -} diff --git a/videooutfile.h b/videooutfile.h index 19706cc..bb33aec 100644 --- a/videooutfile.h +++ b/videooutfile.h @@ -24,7 +24,6 @@ PyObject *Video_out_file_manager_Send_frame(Video_out_file_manager *self, PyObje PyObject *Video_out_file_manager_close(Video_out_file_manager *self, PyObject *args); PyObject *Video_out_file_manager_Set_Frame_Rate(Video_out_file_manager *self, PyObject *args); PyObject *Video_out_file_manager_Set_Video_Codec(Video_out_file_manager *self, PyObject *args); -PyObject *Video_out_file_manager_Enable_Real_Time_Frame_Rate(Video_out_file_manager *self, PyObject *args); #endif //VIDEOOUTFILE_H diff --git a/videooutfile.py b/videooutfile.py index 9ef70e8..d0bd584 100644 --- a/videooutfile.py +++ b/videooutfile.py @@ -20,8 +20,6 @@ outManager.set_video_codec(fina, "H264", 
800000) print "set_frame_rate" outManager.set_frame_rate(fina, 25) - print "enable_real_time_frames", realTimeFrames - outManager.enable_real_time_frame_rate(fina, realTimeFrames) imgLen = w * h * 3 #img = np.ones(shape=(imgLen,), dtype=np.uint8) * 0 From aba20a6b0a44eea991cb174bde012caec2d44d31 Mon Sep 17 00:00:00 2001 From: TimSC Date: Thu, 19 Dec 2013 11:37:57 +0000 Subject: [PATCH 219/256] Change input type to duble --- videooutfile.cpp | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/videooutfile.cpp b/videooutfile.cpp index 0683469..4bfe26c 100644 --- a/videooutfile.cpp +++ b/videooutfile.cpp @@ -91,7 +91,7 @@ PyObject *Video_out_file_manager_open(Video_out_file_manager *self, PyObject *ar PyObject *Video_out_file_manager_Send_frame(Video_out_file_manager *self, PyObject *args) { - //printf("Video_out_file_manager_Send_frame\n"); + //std::cout << "Video_out_file_manager_Send_frame" << std::endl; //dev = '\\dev\\video0', img, pixel_format, width, height, time_sec, time_usec //Process arguments @@ -101,7 +101,7 @@ PyObject *Video_out_file_manager_Send_frame(Video_out_file_manager *self, PyObje int widthIn = 0; int heightIn = 0; unsigned long time_sec = 0; - unsigned long time_usec = 0; + double time_usec = 0; if(PyObject_Length(args) < 5) { @@ -134,7 +134,7 @@ PyObject *Video_out_file_manager_Send_frame(Video_out_file_manager *self, PyObje if(PyObject_Length(args) > 6) { PyObject *pyTimeUSec = PyTuple_GetItem(args, 6); - time_usec = PyInt_AsLong(pyTimeUSec); + time_usec = PyFloat_AsDouble(pyTimeUSec); } std::map::iterator it = self->threads->find(devarg); @@ -143,7 +143,7 @@ PyObject *Video_out_file_manager_Send_frame(Video_out_file_manager *self, PyObje { try { - it->second->SendFrame(imgIn, imgLen, pxFmtIn, widthIn, heightIn, time_sec, time_usec); + it->second->SendFrame(imgIn, imgLen, pxFmtIn, widthIn, heightIn, time_sec, (unsigned int)(time_usec+0.5)); } catch (std::exception &err) { From 5a6d2dc8c4dd5a9b77c70159074efafc73874893 Mon Sep 17 00:00:00 2001 From: TimSC Date: Thu, 19 Dec 2013 13:12:21 +0000 Subject: [PATCH 220/256] Catch out of range error but unsure if this is a good approach --- pixfmt.cpp | 22 ++++++++++++++++++++++ 1 file changed, 22 insertions(+) diff --git a/pixfmt.cpp b/pixfmt.cpp index 878b4d7..b640c48 100644 --- a/pixfmt.cpp +++ b/pixfmt.cpp @@ -462,6 +462,15 @@ int ConvertRgb24ToI420orYV12(const unsigned char *im, unsigned dataLen, unsigned UOutOffset = colOffset + uPlaneOffset; unsigned VOutOffset = colOffset + vPlaneOffset; + if(rgbInOffset1+2 >= dataLen) + throw std::runtime_error("Internal error (1) in ConvertRgb24ToI420orYV12"); + if(rgbInOffset2+2 >= dataLen) + throw std::runtime_error("Internal error (1) in ConvertRgb24ToI420orYV12"); + if(rgbInOffset3+2 >= dataLen) + throw std::runtime_error("Internal error (1) in ConvertRgb24ToI420orYV12"); + if(rgbInOffset4+2 >= dataLen) + throw std::runtime_error("Internal error (1) in ConvertRgb24ToI420orYV12"); + unsigned Y1 = 66 * im[rgbInOffset1] + 129 * im[rgbInOffset1+1] + 25 * im[rgbInOffset1+2]; unsigned Y2 = 66 * im[rgbInOffset2] + 129 * im[rgbInOffset2+1] + 25 * im[rgbInOffset2+2]; unsigned Y3 = 66 * im[rgbInOffset3] + 129 * im[rgbInOffset3+1] + 25 * im[rgbInOffset3+2]; @@ -492,6 +501,19 @@ int ConvertRgb24ToI420orYV12(const unsigned char *im, unsigned dataLen, V3 = ((V3 + 128) >> 8) + 128; V4 = ((V4 + 128) >> 8) + 128; + if(YOutOffset1 >= *buffOutLen) + throw std::runtime_error("Internal error (2) in ConvertRgb24ToI420orYV12"); + if(YOutOffset2 >= *buffOutLen) + throw 
std::runtime_error("Internal error (2) in ConvertRgb24ToI420orYV12"); + if(YOutOffset3 >= *buffOutLen) + throw std::runtime_error("Internal error (2) in ConvertRgb24ToI420orYV12"); + if(YOutOffset4 >= *buffOutLen) + throw std::runtime_error("Internal error (2) in ConvertRgb24ToI420orYV12"); + if(VOutOffset >= *buffOutLen) + throw std::runtime_error("Internal error (2) in ConvertRgb24ToI420orYV12"); + if(UOutOffset >= *buffOutLen) + throw std::runtime_error("Internal error (2) in ConvertRgb24ToI420orYV12"); + (*buffOut)[YOutOffset1] = Y1; (*buffOut)[YOutOffset2] = Y2; (*buffOut)[YOutOffset3] = Y3; From 243abbaac3db3792fc68eef491652e0c49af5a82 Mon Sep 17 00:00:00 2001 From: TimSC Date: Thu, 19 Dec 2013 17:20:44 +0000 Subject: [PATCH 221/256] Close file when finished --- mfvideooutfile.cpp | 6 +++++- mfvideooutfile.h | 1 + pixfmt.cpp | 35 +++++++++++++++-------------------- 3 files changed, 21 insertions(+), 21 deletions(-) diff --git a/mfvideooutfile.cpp b/mfvideooutfile.cpp index d9f74a4..3d0c9d8 100644 --- a/mfvideooutfile.cpp +++ b/mfvideooutfile.cpp @@ -98,6 +98,7 @@ MfVideoOutFile::MfVideoOutFile(const char *fiName) : Base_Video_Out() this->fina = CStringToWString(fiName); this->frameRateFps = 25; this->prevFrameDuration = 0; + this->pIByteStream = NULL; SetTimeToZero(this->startVideoTime); } @@ -126,7 +127,7 @@ void MfVideoOutFile::OpenFile() IMFAttributes *containerAttributes = NULL; HRESULT hr = MFCreateAttributes(&containerAttributes, 0); - IMFByteStream *pIByteStream = NULL; + this->pIByteStream = NULL; if (SUCCEEDED(hr)) { @@ -315,7 +316,10 @@ void MfVideoOutFile::CloseFile() { HRESULT hr = this->pSinkWriter->Finalize(); } + SafeRelease(&pSinkWriter); + + SafeRelease(&pIByteStream); } void MfVideoOutFile::SendFrame(const char *imgIn, diff --git a/mfvideooutfile.h b/mfvideooutfile.h index 3536f94..3acf68d 100644 --- a/mfvideooutfile.h +++ b/mfvideooutfile.h @@ -46,6 +46,7 @@ class MfVideoOutFile : public Base_Video_Out std::vector outBufferMeta; std::vector outBuffer; LONGLONG prevFrameDuration; + IMFByteStream *pIByteStream; }; void *MfVideoOut_File_Worker_thread(void *arg); diff --git a/pixfmt.cpp b/pixfmt.cpp index b640c48..481e897 100644 --- a/pixfmt.cpp +++ b/pixfmt.cpp @@ -421,6 +421,11 @@ int ConvertRgb24ToI420orYV12(const unsigned char *im, unsigned dataLen, unsigned *buffOutLen, const char *outPxFmt) { + //Check if input buffer is of sufficient size + int requiredInputSize = 3 * width * height; + if(requiredInputSize != dataLen) + throw std::runtime_error("Input buffer has unexpected size"); + //Create output buffer if required int requiredSize = width * height * 1.5; if(*buffOutLen != 0 && *buffOutLen != requiredSize) @@ -462,14 +467,10 @@ int ConvertRgb24ToI420orYV12(const unsigned char *im, unsigned dataLen, unsigned UOutOffset = colOffset + uPlaneOffset; unsigned VOutOffset = colOffset + vPlaneOffset; - if(rgbInOffset1+2 >= dataLen) - throw std::runtime_error("Internal error (1) in ConvertRgb24ToI420orYV12"); - if(rgbInOffset2+2 >= dataLen) - throw std::runtime_error("Internal error (1) in ConvertRgb24ToI420orYV12"); - if(rgbInOffset3+2 >= dataLen) - throw std::runtime_error("Internal error (1) in ConvertRgb24ToI420orYV12"); - if(rgbInOffset4+2 >= dataLen) - throw std::runtime_error("Internal error (1) in ConvertRgb24ToI420orYV12"); + if(rgbInOffset1+2 >= dataLen) {throw std::runtime_error("1");} + if(rgbInOffset2+2 >= dataLen) {throw std::runtime_error("2");} + if(rgbInOffset3+2 >= dataLen) {throw std::runtime_error("3");} + if(rgbInOffset4+2 >= dataLen) {throw 
std::runtime_error("4");} unsigned Y1 = 66 * im[rgbInOffset1] + 129 * im[rgbInOffset1+1] + 25 * im[rgbInOffset1+2]; unsigned Y2 = 66 * im[rgbInOffset2] + 129 * im[rgbInOffset2+1] + 25 * im[rgbInOffset2+2]; @@ -501,18 +502,12 @@ int ConvertRgb24ToI420orYV12(const unsigned char *im, unsigned dataLen, V3 = ((V3 + 128) >> 8) + 128; V4 = ((V4 + 128) >> 8) + 128; - if(YOutOffset1 >= *buffOutLen) - throw std::runtime_error("Internal error (2) in ConvertRgb24ToI420orYV12"); - if(YOutOffset2 >= *buffOutLen) - throw std::runtime_error("Internal error (2) in ConvertRgb24ToI420orYV12"); - if(YOutOffset3 >= *buffOutLen) - throw std::runtime_error("Internal error (2) in ConvertRgb24ToI420orYV12"); - if(YOutOffset4 >= *buffOutLen) - throw std::runtime_error("Internal error (2) in ConvertRgb24ToI420orYV12"); - if(VOutOffset >= *buffOutLen) - throw std::runtime_error("Internal error (2) in ConvertRgb24ToI420orYV12"); - if(UOutOffset >= *buffOutLen) - throw std::runtime_error("Internal error (2) in ConvertRgb24ToI420orYV12"); + if(YOutOffset1 >= *buffOutLen) {throw std::runtime_error("5");} + if(YOutOffset2 >= *buffOutLen) {throw std::runtime_error("6");} + if(YOutOffset3 >= *buffOutLen) {throw std::runtime_error("7");} + if(YOutOffset4 >= *buffOutLen) {throw std::runtime_error("8");} + if(VOutOffset >= *buffOutLen) {throw std::runtime_error("9");} + if(UOutOffset >= *buffOutLen) {throw std::runtime_error("10");} (*buffOut)[YOutOffset1] = Y1; (*buffOut)[YOutOffset2] = Y2; From 572c344f3c9df9e774f8a5b1ba067bb4cc43d647 Mon Sep 17 00:00:00 2001 From: TimSC Date: Thu, 19 Dec 2013 18:29:18 +0000 Subject: [PATCH 222/256] Support both byte arrays and strings for input images --- pixfmt.cpp | 2 +- videoout.cpp | 5 +++++ videooutfile.cpp | 12 ++++++++++-- 3 files changed, 16 insertions(+), 3 deletions(-) diff --git a/pixfmt.cpp b/pixfmt.cpp index 481e897..c433dab 100644 --- a/pixfmt.cpp +++ b/pixfmt.cpp @@ -518,7 +518,7 @@ int ConvertRgb24ToI420orYV12(const unsigned char *im, unsigned dataLen, (*buffOut)[UOutOffset] = (unsigned char)((U1+U2+U3+U4)/4.+0.5); } } - + return 1; } diff --git a/videoout.cpp b/videoout.cpp index 3994543..573fce0 100644 --- a/videoout.cpp +++ b/videoout.cpp @@ -94,8 +94,13 @@ PyObject *Video_out_manager_Send_frame(Video_out_manager *self, PyObject *args) PyObject *pyimg = PyTuple_GetItem(args, 1); imgIn = PyString_AsString(pyimg); + if(imgIn==NULL && PyString_Check(pyimg)) imgIn = PyString_AsString(pyimg); + if(imgIn==NULL && PyByteArray_Check(pyimg)) imgIn = PyByteArray_AsString(pyimg); Py_ssize_t imgLen = PyObject_Length(pyimg); + if(imgIn == NULL) + PyErr_SetString(PyExc_RuntimeError, "Argument 2 must be a string or bytearray."); + PyObject *pyPxFmt = PyTuple_GetItem(args, 2); pxFmtIn = PyString_AsString(pyPxFmt); diff --git a/videooutfile.cpp b/videooutfile.cpp index 4bfe26c..d3c39b9 100644 --- a/videooutfile.cpp +++ b/videooutfile.cpp @@ -113,9 +113,14 @@ PyObject *Video_out_file_manager_Send_frame(Video_out_file_manager *self, PyObje devarg = PyString_AsString(pydev); PyObject *pyimg = PyTuple_GetItem(args, 1); - imgIn = PyString_AsString(pyimg); + if(imgIn==NULL && PyString_Check(pyimg)) imgIn = PyString_AsString(pyimg); + if(imgIn==NULL && PyByteArray_Check(pyimg)) imgIn = PyByteArray_AsString(pyimg); + Py_ssize_t imgLen = PyObject_Length(pyimg); + if(imgIn == NULL) + PyErr_SetString(PyExc_RuntimeError, "Argument 2 must be a string or byte array."); + PyObject *pyPxFmt = PyTuple_GetItem(args, 2); pxFmtIn = PyString_AsString(pyPxFmt); @@ -138,12 +143,15 @@ PyObject 
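The size checks added to ConvertRgb24ToI420orYV12 above follow directly from the planar layout: a full-resolution Y plane followed by two 2x2-subsampled chroma planes, U then V for I420 and V then U for YV12. A short sketch of the buffer sizes involved, with example dimensions:

    width, height = 640, 480                   # example only; both must be even
    rgb24_len = width * height * 3             # input: 3 bytes per pixel
    y_plane = width * height                   # full-resolution luma
    chroma_plane = (width / 2) * (height / 2)  # each chroma plane is quarter size
    i420_len = y_plane + 2 * chroma_plane      # equals width * height * 3 / 2
    print rgb24_len, i420_len                  # 921600 460800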
*Video_out_file_manager_Send_frame(Video_out_file_manager *self, PyObje } std::map::iterator it = self->threads->find(devarg); + class Base_Video_Out *vidOut = NULL; if(it != self->threads->end()) { try { - it->second->SendFrame(imgIn, imgLen, pxFmtIn, widthIn, heightIn, time_sec, (unsigned int)(time_usec+0.5)); + vidOut = it->second; + if(imgIn != NULL) + vidOut->SendFrame(imgIn, imgLen, pxFmtIn, widthIn, heightIn, time_sec, (unsigned int)(time_usec+0.5)); } catch (std::exception &err) { From 4a10afa76e95b32862d7b85099f1b497b42ae281 Mon Sep 17 00:00:00 2001 From: TimSC Date: Thu, 19 Dec 2013 18:51:16 +0000 Subject: [PATCH 223/256] Simplify time stamp in python api --- libvideolive.cpp | 2 +- videooutfile.cpp | 21 ++++++++------------- 2 files changed, 9 insertions(+), 14 deletions(-) diff --git a/libvideolive.cpp b/libvideolive.cpp index f1d39e5..dd49417 100644 --- a/libvideolive.cpp +++ b/libvideolive.cpp @@ -129,7 +129,7 @@ static PyMethodDef Video_out_file_manager_methods[] = { "open(filename = 'out.wmv', width, height)\n\n" "Open video output."}, {"send_frame", (PyCFunction)Video_out_file_manager_Send_frame, METH_VARARGS, - "send_frame(filename = 'out.wmv', img, pixel_format, width, height)\n\n" + "send_frame(filename = 'out.wmv', img, pixel_format, width, height, timestamp=None)\n\n" "Send frame to video stream output."}, {"close", (PyCFunction)Video_out_file_manager_close, METH_VARARGS, "close(filename = 'out.wmv')\n\n" diff --git a/videooutfile.cpp b/videooutfile.cpp index d3c39b9..8e787c9 100644 --- a/videooutfile.cpp +++ b/videooutfile.cpp @@ -100,8 +100,7 @@ PyObject *Video_out_file_manager_Send_frame(Video_out_file_manager *self, PyObje const char *pxFmtIn = NULL; int widthIn = 0; int heightIn = 0; - unsigned long time_sec = 0; - double time_usec = 0; + double time_sec = 0; if(PyObject_Length(args) < 5) { @@ -133,25 +132,21 @@ PyObject *Video_out_file_manager_Send_frame(Video_out_file_manager *self, PyObje if(PyObject_Length(args) > 5) { PyObject *pyTimeSec = PyTuple_GetItem(args, 5); - time_sec = PyInt_AsLong(pyTimeSec); - } - - if(PyObject_Length(args) > 6) - { - PyObject *pyTimeUSec = PyTuple_GetItem(args, 6); - time_usec = PyFloat_AsDouble(pyTimeUSec); + if(pyTimeSec == Py_None) time_sec = 0; + if(PyInt_Check(pyTimeSec)) time_sec = PyInt_AsLong(pyTimeSec); + if(PyFloat_Check(pyTimeSec)) time_sec = PyFloat_AsDouble(pyTimeSec); } std::map::iterator it = self->threads->find(devarg); - class Base_Video_Out *vidOut = NULL; if(it != self->threads->end()) { try { - vidOut = it->second; - if(imgIn != NULL) - vidOut->SendFrame(imgIn, imgLen, pxFmtIn, widthIn, heightIn, time_sec, (unsigned int)(time_usec+0.5)); + unsigned int timeSec = (unsigned int)(time_sec); + double time_usec = (time_sec - timeSec) * 1e6; + + it->second->SendFrame(imgIn, imgLen, pxFmtIn, widthIn, heightIn, time_sec, (unsigned int)(time_usec+0.5)); } catch (std::exception &err) { From c0a53151fb4c4ea1df4255bf9672c1c9e3549181 Mon Sep 17 00:00:00 2001 From: TimSC Date: Sat, 21 Dec 2013 14:43:09 +0000 Subject: [PATCH 224/256] Read exposure of webcam --- mfvideoin.cpp | 48 ++++++++++++++++++++++++++++++++++++++++++++++++ mfvideoin.h | 2 ++ 2 files changed, 50 insertions(+) diff --git a/mfvideoin.cpp b/mfvideoin.cpp index 06e66c3..d3b787f 100644 --- a/mfvideoin.cpp +++ b/mfvideoin.cpp @@ -9,10 +9,14 @@ using namespace std; #include #include #include +#include #include "mfvideoin.h" #include "pixfmt.h" +//See also: +//https://github.com/Itseez/opencv/blob/master/modules/highgui/src/cap_msmf.cpp + #define 
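After this change the Python caller passes a single timestamp in seconds (an int, a float, or None to fall back to the fixed frame rate) and the wrapper splits it into seconds and microseconds itself. A minimal usage sketch in the style of videooutfile.py; the manager object, fina, w and h are assumed to be set up as in that script, and grabFrame is a hypothetical source of RGB24 data:

    import time
    startTime = time.time()
    for frameNum in range(100):
        img = grabFrame()                     # hypothetical: returns w * h * 3 bytes of RGB24
        timestamp = time.time() - startTime   # seconds since the start of the recording
        # img may be a string or a bytearray; timestamp may be an int, a float or None
        outManager.send_frame(fina, img, "RGB24", w, h, timestamp)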
MAX_DEVICE_ID_LEN 100 int EnumDevices(IMFActivate ***ppDevicesOut); @@ -769,9 +773,52 @@ void MfVideoIn::StartDeviceInternal() this->reader = readerTmp; + this->GetMfParameter(); + SafeRelease(&pAttributes); } +int MfVideoIn::GetMfParameter(long prop) +{ + long CurrentValue = 0; + long Min = 0; + long Max = 0; + long Step = 0; + long Default = 0; + long Flag = 0; + + if(prop==0) + prop = CameraControl_Exposure; + + IAMCameraControl *pProcControl = NULL; + HRESULT hr = this->source->QueryInterface(IID_PPV_ARGS(&pProcControl)); + if(!SUCCEEDED(hr)) + throw runtime_error("IAMCameraControl interface not available"); + + hr = pProcControl->GetRange(prop, &Min, &Max, &Step, &Default, &Flag); + if(!SUCCEEDED(hr)) + { + SafeRelease(&pProcControl); + return 0; + } + + std::cout << "CurrentValue " << CurrentValue << std::endl; + std::cout << "Min " << Min << std::endl; + std::cout << "Max " << Max << std::endl; + std::cout << "Step " << Step << std::endl; + std::cout << "Default " << Default << std::endl; + std::cout << "Flag " << Flag << std::endl; + + long val = 0, flags = 0; + hr = pProcControl->Get(prop, &val, &flags); + + std::cout << "Value " << val << std::endl; + std::cout << "Flag " << flags << std::endl; + + SafeRelease(&pProcControl); + return 1; +} + void MfVideoIn::SetSampleMetaData(DWORD streamIndex) { //Set meta data in output object @@ -810,6 +857,7 @@ void MfVideoIn::SetSampleMetaData(DWORD streamIndex) } void MfVideoIn::PopFrontMetaDataBuff() + { if(this->plStrideBuff.size()>0) this->plStrideBuff.erase(this->plStrideBuff.begin()); if(this->majorTypeBuff.size()>0) this->majorTypeBuff.erase(this->majorTypeBuff.begin()); diff --git a/mfvideoin.h b/mfvideoin.h index 99491ed..e536485 100644 --- a/mfvideoin.h +++ b/mfvideoin.h @@ -31,6 +31,8 @@ class MfVideoIn : public WmfBase virtual void CloseDevice(); virtual int GetFrame(unsigned char **buffOut, class FrameMetaData *metaOut); + virtual int GetMfParameter(long prop = 0); + void Run(); protected: From 5a4826ba35dab59bdf62ddb1507b703f1c4d550b Mon Sep 17 00:00:00 2001 From: TimSC Date: Sat, 21 Dec 2013 15:04:26 +0000 Subject: [PATCH 225/256] Trying to set exposure but makes little difference --- mfvideoin.cpp | 25 +++++++++++++++++++++++-- mfvideoin.h | 1 + 2 files changed, 24 insertions(+), 2 deletions(-) diff --git a/mfvideoin.cpp b/mfvideoin.cpp index d3b787f..32e6164 100644 --- a/mfvideoin.cpp +++ b/mfvideoin.cpp @@ -773,7 +773,9 @@ void MfVideoIn::StartDeviceInternal() this->reader = readerTmp; - this->GetMfParameter(); + //this->GetMfParameter(); + //this->SetMfParameter(CameraControl_Exposure, -6, 2); + //this->GetMfParameter(); SafeRelease(&pAttributes); } @@ -816,7 +818,26 @@ int MfVideoIn::GetMfParameter(long prop) std::cout << "Flag " << flags << std::endl; SafeRelease(&pProcControl); - return 1; + return SUCCEEDED(hr); +} + +int MfVideoIn::SetMfParameter(long prop, long value, long flags) +{ + if(prop==0) + prop = CameraControl_Exposure; + if(flags==0) + flags = CameraControl_Flags_Manual; + + IAMCameraControl *pProcControl = NULL; + HRESULT hr = this->source->QueryInterface(IID_PPV_ARGS(&pProcControl)); + if(!SUCCEEDED(hr)) + throw runtime_error("IAMCameraControl interface not available"); + + hr = pProcControl->Set(prop, value, flags); + + SafeRelease(&pProcControl); + return SUCCEEDED(hr); + } void MfVideoIn::SetSampleMetaData(DWORD streamIndex) diff --git a/mfvideoin.h b/mfvideoin.h index e536485..f8284b5 100644 --- a/mfvideoin.h +++ b/mfvideoin.h @@ -32,6 +32,7 @@ class MfVideoIn : public WmfBase virtual int 
GetFrame(unsigned char **buffOut, class FrameMetaData *metaOut); virtual int GetMfParameter(long prop = 0); + virtual int SetMfParameter(long prop = 0, long value = 0, long flags = 0); void Run(); protected: From fd71cdd406ae83112e5b2d8e061bdf64c292236c Mon Sep 17 00:00:00 2001 From: TimSC Date: Sat, 21 Dec 2013 17:01:08 +0000 Subject: [PATCH 226/256] Hack to set exposure --- mfvideoin.cpp | 111 ++++++++++++++++++++++++++++++++++++++++---------- mfvideoin.h | 6 ++- 2 files changed, 94 insertions(+), 23 deletions(-) diff --git a/mfvideoin.cpp b/mfvideoin.cpp index 32e6164..e08aefa 100644 --- a/mfvideoin.cpp +++ b/mfvideoin.cpp @@ -591,7 +591,9 @@ int MfVideoIn::GetFrame(unsigned char **buffOut, class FrameMetaData *metaOut) if(wcscmp(this->subTypeBuff[0].c_str(), L"MFVideoFormat_YUY2")==0) currentPixFmt = "YUYV"; //YUYV = YUY2 - + if(wcscmp(this->subTypeBuff[0].c_str(), L"MFVideoFormat_RGB24")==0) + currentPixFmt = "RGB24"; + //Do conversion to rgb unsigned char *buffConv = NULL; unsigned buffConvLen = 0; @@ -773,25 +775,21 @@ void MfVideoIn::StartDeviceInternal() this->reader = readerTmp; - //this->GetMfParameter(); - //this->SetMfParameter(CameraControl_Exposure, -6, 2); - //this->GetMfParameter(); + //this->GetMfControl(CameraControl_Exposure); + //this->SetMfControl(CameraControl_Exposure, -3, 1); + //this->GetMfControl(CameraControl_Exposure, 1); SafeRelease(&pAttributes); } -int MfVideoIn::GetMfParameter(long prop) +int MfVideoIn::GetMfControl(long prop, int range) { - long CurrentValue = 0; long Min = 0; long Max = 0; long Step = 0; long Default = 0; long Flag = 0; - if(prop==0) - prop = CameraControl_Exposure; - IAMCameraControl *pProcControl = NULL; HRESULT hr = this->source->QueryInterface(IID_PPV_ARGS(&pProcControl)); if(!SUCCEEDED(hr)) @@ -804,27 +802,27 @@ int MfVideoIn::GetMfParameter(long prop) return 0; } - std::cout << "CurrentValue " << CurrentValue << std::endl; - std::cout << "Min " << Min << std::endl; - std::cout << "Max " << Max << std::endl; - std::cout << "Step " << Step << std::endl; - std::cout << "Default " << Default << std::endl; - std::cout << "Flag " << Flag << std::endl; + if(range) + { + std::cout << "Min " << Min << std::endl; + std::cout << "Max " << Max << std::endl; + std::cout << "Step " << Step << std::endl; + std::cout << "Default " << Default << std::endl; + std::cout << "Allowed Flag " << Flag << std::endl; + } long val = 0, flags = 0; hr = pProcControl->Get(prop, &val, &flags); - std::cout << "Value " << val << std::endl; - std::cout << "Flag " << flags << std::endl; + std::cout << "Current Value " << prop << " " << val << std::endl; + std::cout << "Current Flag " << prop << " " << flags << std::endl; SafeRelease(&pProcControl); return SUCCEEDED(hr); } -int MfVideoIn::SetMfParameter(long prop, long value, long flags) +int MfVideoIn::SetMfControl(long prop, long value, long flags) { - if(prop==0) - prop = CameraControl_Exposure; if(flags==0) flags = CameraControl_Flags_Manual; @@ -840,6 +838,62 @@ int MfVideoIn::SetMfParameter(long prop, long value, long flags) } +int MfVideoIn::GetMfParameter(long param, int range) +{ + long Min = 0; + long Max = 0; + long Step = 0; + long Default = 0; + long Flag = 0; + + IAMVideoProcAmp *pProcAmp = NULL; + HRESULT hr = this->source->QueryInterface(IID_PPV_ARGS(&pProcAmp)); + if(!SUCCEEDED(hr)) + throw runtime_error("IAMCameraControl interface not available"); + + hr = pProcAmp->GetRange(param, &Min, &Max, &Step, &Default, &Flag); + if(!SUCCEEDED(hr)) + { + SafeRelease(&pProcAmp); + return 0; + } + + if(range) + 
{ + std::cout << "param "<< param << " Min " << Min << std::endl; + std::cout << "param "<< param << " Max " << Max << std::endl; + std::cout << "param "<< param << " Step " << Step << std::endl; + std::cout << "param "<< param << " Default " << Default << std::endl; + std::cout << "param "<< param << " Allowed Flag " << Flag << std::endl; + } + + long val = 0, flags = 0; + hr = pProcAmp->Get(param, &val, &flags); + + std::cout << "param "<< param << " Current Value " << val << std::endl; + std::cout << "param "<< param << " Current Flag " << flags << std::endl; + + SafeRelease(&pProcAmp); + return SUCCEEDED(hr); +} + +int MfVideoIn::SetMfParameter(long param, long value, long flags) +{ + if(flags==0) + flags = CameraControl_Flags_Manual; + + IAMVideoProcAmp *pProcAmp = NULL; + HRESULT hr = this->source->QueryInterface(IID_PPV_ARGS(&pProcAmp)); + if(!SUCCEEDED(hr)) + throw runtime_error("IAMCameraControl interface not available"); + + hr = pProcAmp->Set(param, value, flags); + + SafeRelease(&pProcAmp); + return SUCCEEDED(hr); + +} + void MfVideoIn::SetSampleMetaData(DWORD streamIndex) { //Set meta data in output object @@ -961,7 +1015,22 @@ void MfVideoIn::ReadFramesInternal() delete [] frame; } - LeaveCriticalSection(&lock); + LeaveCriticalSection(&lock); + + //for(long i=VideoProcAmp_Brightness;i<=VideoProcAmp_Gain;i++) + // this->GetMfParameter(i, 0); + int ret = this->SetMfControl(CameraControl_Exposure, -3, CameraControl_Flags_Manual); + std::cout << "ret" << ret << std::endl; + this->GetMfControl(CameraControl_Exposure, 1); + this->SetMfParameter(VideoProcAmp_Gain, 0, VideoProcAmp_Flags_Auto); + std::cout << "ret" << ret << std::endl; + this->GetMfParameter(VideoProcAmp_Gain, 0); + this->SetMfParameter(VideoProcAmp_Gamma, 72, VideoProcAmp_Flags_Auto); + std::cout << "ret" << ret << std::endl; + this->GetMfParameter(VideoProcAmp_Gamma, 0); + //for(long i=CameraControl_Pan;i<=CameraControl_Focus;i++) + // this->GetMfControl(i, 0); + return; } else diff --git a/mfvideoin.h b/mfvideoin.h index f8284b5..f76ad95 100644 --- a/mfvideoin.h +++ b/mfvideoin.h @@ -31,8 +31,10 @@ class MfVideoIn : public WmfBase virtual void CloseDevice(); virtual int GetFrame(unsigned char **buffOut, class FrameMetaData *metaOut); - virtual int GetMfParameter(long prop = 0); - virtual int SetMfParameter(long prop = 0, long value = 0, long flags = 0); + virtual int GetMfControl(long prop, int range = 0); + virtual int SetMfControl(long prop, long value = 0, long flags = 0); + virtual int GetMfParameter(long param, int range = 0); + virtual int SetMfParameter(long param, long value = 0, long flags = 0); void Run(); protected: From 697f694e8b2a74e9a56b16b8823390ff52a7d08e Mon Sep 17 00:00:00 2001 From: TimSC Date: Sat, 21 Dec 2013 23:01:59 +0000 Subject: [PATCH 227/256] Disable test code --- mfvideoin.cpp | 14 ++++++++++++-- 1 file changed, 12 insertions(+), 2 deletions(-) diff --git a/mfvideoin.cpp b/mfvideoin.cpp index e08aefa..8606866 100644 --- a/mfvideoin.cpp +++ b/mfvideoin.cpp @@ -779,6 +779,16 @@ void MfVideoIn::StartDeviceInternal() //this->SetMfControl(CameraControl_Exposure, -3, 1); //this->GetMfControl(CameraControl_Exposure, 1); + /*try + { + int ret = this->SetMfControl(CameraControl_Exposure, -3, CameraControl_Flags_Manual); + std::cout << "ret" << ret << std::endl; + } + catch(std::runtime_error &err) + { + std::cout << "exception " << err.what() << std::endl; + }*/ + SafeRelease(&pAttributes); } @@ -1019,7 +1029,7 @@ void MfVideoIn::ReadFramesInternal() //for(long 
i=VideoProcAmp_Brightness;i<=VideoProcAmp_Gain;i++) // this->GetMfParameter(i, 0); - int ret = this->SetMfControl(CameraControl_Exposure, -3, CameraControl_Flags_Manual); + /*int ret = this->SetMfControl(CameraControl_Exposure, -3, CameraControl_Flags_Manual); std::cout << "ret" << ret << std::endl; this->GetMfControl(CameraControl_Exposure, 1); this->SetMfParameter(VideoProcAmp_Gain, 0, VideoProcAmp_Flags_Auto); @@ -1027,7 +1037,7 @@ void MfVideoIn::ReadFramesInternal() this->GetMfParameter(VideoProcAmp_Gain, 0); this->SetMfParameter(VideoProcAmp_Gamma, 72, VideoProcAmp_Flags_Auto); std::cout << "ret" << ret << std::endl; - this->GetMfParameter(VideoProcAmp_Gamma, 0); + this->GetMfParameter(VideoProcAmp_Gamma, 0);*/ //for(long i=CameraControl_Pan;i<=CameraControl_Focus;i++) // this->GetMfControl(i, 0); From 812eec12fd1352814bb586f4549d3a4388057ede Mon Sep 17 00:00:00 2001 From: TimSC Date: Tue, 31 Dec 2013 14:15:00 +0000 Subject: [PATCH 228/256] Vertical flip of webcam input --- mfvideoin.cpp | 2 +- pixfmt.cpp | 76 +++++++++++++++++++++++++++++++++++++++++++++++++-- pixfmt.h | 5 ++++ 3 files changed, 80 insertions(+), 3 deletions(-) diff --git a/mfvideoin.cpp b/mfvideoin.cpp index 8606866..d599fce 100644 --- a/mfvideoin.cpp +++ b/mfvideoin.cpp @@ -592,7 +592,7 @@ int MfVideoIn::GetFrame(unsigned char **buffOut, class FrameMetaData *metaOut) if(wcscmp(this->subTypeBuff[0].c_str(), L"MFVideoFormat_YUY2")==0) currentPixFmt = "YUYV"; //YUYV = YUY2 if(wcscmp(this->subTypeBuff[0].c_str(), L"MFVideoFormat_RGB24")==0) - currentPixFmt = "RGB24"; + currentPixFmt = "RGB24INV"; //Do conversion to rgb unsigned char *buffConv = NULL; diff --git a/pixfmt.cpp b/pixfmt.cpp index c433dab..f78672c 100644 --- a/pixfmt.cpp +++ b/pixfmt.cpp @@ -703,6 +703,52 @@ int DecodeFrame(const unsigned char *data, unsigned dataLen, } } + //Destination of RGB24INV, so convert to RGB24 first + if(strcmp(targetPxFmt, "RGB24INV")==0) + { + unsigned char *rgbBuff = NULL; + unsigned rgbBuffLen = 0; + int ret = DecodeFrame(data, dataLen, + inPxFmt, + width, height, + "RGB24", + &rgbBuff, + &rgbBuffLen); + + if(ret>0) + { + int ret2 = VerticalFlipRgb24(rgbBuff, rgbBuffLen, + width, height, + buffOut, + buffOutLen); + delete [] rgbBuff; + if(ret2>0) return ret2; + } + } + + //Vertical flip of RGB24 + if(strcmp(inPxFmt, "RGB24INV")==0) + { + unsigned char *rgbBuff = NULL; + unsigned rgbBuffLen = 0; + int ret = VerticalFlipRgb24(data, dataLen, + width, height, + &rgbBuff, + &rgbBuffLen); + + if(ret>0) + { + int ret2 = DecodeFrame(rgbBuff, rgbBuffLen, + "RGB24", + width, height, + targetPxFmt, + buffOut, + buffOutLen); + delete [] rgbBuff; + if(ret2>0) return ret2; + } + } + /* //Untested code if((strcmp(inPxFmt,"YUV2")==0 || strcmp(inPxFmt,"YVU2")==0) @@ -826,8 +872,6 @@ int CropToFitRgb24Image(const unsigned char *data, unsigned dataLen, return 1; } -//******************************************************************* - int ResizeFrame(const unsigned char *data, unsigned dataLen, const char *pxFmt, @@ -857,6 +901,34 @@ int ResizeFrame(const unsigned char *data, return 0; } +/// ************************************************************** + +int VerticalFlipRgb24(const unsigned char *im, unsigned dataLen, + int width, int height, + unsigned char **buffOut, + unsigned *buffOutLen) +{ + //RGB24 -> RGB24INV + //RGB24INV -> RGB24 + + if(dataLen != width * height * 3) + throw std::runtime_error("Input buffer has incorrect size"); + if(*buffOutLen != 0 && *buffOutLen != dataLen) + throw std::runtime_error("Output buffer has incorrect 
size"); + if(*buffOut == NULL) + *buffOut = new unsigned char[dataLen]; + *buffOutLen = dataLen; + + for(int y = 0; y < height; y++) + { + int invy = height - y - 1; + const unsigned char *inRow = &im[y * width * 3]; + unsigned char *outRow = &((*buffOut)[invy * width * 3]); + memcpy(outRow, inRow, width * 3); + } + return 1; +} + // ****** Combined resize and convert ************************************************* diff --git a/pixfmt.h b/pixfmt.h index fad783c..4930341 100644 --- a/pixfmt.h +++ b/pixfmt.h @@ -36,6 +36,11 @@ int ResizeRgb24ImageNN(const unsigned char *data, unsigned dataLen, unsigned buffOutLen, int widthOut, int heightOut, int invertVertical = 0, int tupleLen = 3); +int VerticalFlipRgb24(const unsigned char *im, unsigned dataLen, + int width, int height, + unsigned char **buffOut, + unsigned *buffOutLen); + int InsertHuffmanTableCTypes(const unsigned char* inBufferPtr, unsigned inBufferLen, std::string &outBuffer); #endif //_PIXFMT_H_ From fbd36f15662ccb5ebe5bc4c5354caaf385f0a7fe Mon Sep 17 00:00:00 2001 From: Tim Sheerman-Chase Date: Wed, 1 Jan 2014 21:45:34 +0000 Subject: [PATCH 229/256] Use external clock with linux --- v4l2capture.cpp | 2 +- v4l2out.cpp | 25 +++++++++++++++++-------- v4l2out.h | 2 +- 3 files changed, 19 insertions(+), 10 deletions(-) diff --git a/v4l2capture.cpp b/v4l2capture.cpp index 18dfe7e..f05159c 100644 --- a/v4l2capture.cpp +++ b/v4l2capture.cpp @@ -62,7 +62,7 @@ int my_ioctl(int fd, int request, void *arg, int utimeout = -1) //printf("call\n"); int result = v4l2_ioctl(fd, request, arg); - //printf("%d\n", result); + printf("v4l2_ioctl %d\n", result); if(!result) { diff --git a/v4l2out.cpp b/v4l2out.cpp index 9da3e93..ee5e975 100644 --- a/v4l2out.cpp +++ b/v4l2out.cpp @@ -41,12 +41,16 @@ class SendFrameArgs std::string pxFmt; unsigned width; unsigned height; + unsigned long tv_sec; + unsigned long tv_usec; SendFrameArgs() { imgLen = 0; width = 0; height = 0; + tv_sec = 0; + tv_usec = 0; } SendFrameArgs(const SendFrameArgs &in) @@ -60,6 +64,8 @@ class SendFrameArgs height = in.height; imgLen = in.imgLen; pxFmt = in.pxFmt; + tv_sec = in.tv_sec; + tv_usec = in.tv_usec; return *this; } }; @@ -80,7 +86,8 @@ Video_out::Video_out(const char *devNameIn) : Base_Video_Out() outputHeight = 480; outputPxFmt = "YUYV"; - clock_gettime(CLOCK_MONOTONIC, &lastFrameTime); + lastFrameTime_sec = 0; + lastFrameTime_usec = 0; struct sigevent sevp; memset(&sevp, 0, sizeof(struct sigevent)); @@ -122,10 +129,8 @@ void Video_out::SendFrameInternal() pthread_mutex_unlock(&this->lock); //Check time since previous frame send - struct timespec tp; - clock_gettime(CLOCK_MONOTONIC, &tp); - long int secSinceLastFrame = tp.tv_sec - this->lastFrameTime.tv_sec; - long int nsecSinceLastFrame = tp.tv_nsec - this->lastFrameTime.tv_nsec; + long int secSinceLastFrame = args.tv_sec - this->lastFrameTime_sec; + long int nsecSinceLastFrame = args.tv_usec - this->lastFrameTime_usec; if(nsecSinceLastFrame < 0) { secSinceLastFrame -= 1; @@ -187,7 +192,8 @@ void Video_out::SendFrameInternal() printf("Write frame due to elapse time\n"); write(this->fdwr, this->currentFrame, this->framesize); - this->lastFrameTime = tp; + this->lastFrameTime_sec = args.tv_sec; + this->lastFrameTime_usec = args.tv_usec; } //Free image buffer @@ -292,7 +298,9 @@ void Video_out::Run() pthread_mutex_unlock(&this->lock); } -void Video_out::SendFrame(const char *imgIn, unsigned imgLen, const char *pxFmt, int width, int height) +void Video_out::SendFrame(const char *imgIn, unsigned imgLen, const char *pxFmt, int 
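The RGB24INV tag introduced above simply means RGB24 with the rows stored bottom-up, so the conversion is a pure row reversal, which VerticalFlipRgb24 performs with one memcpy per row. A rough numpy equivalent for comparison (frame, width and height are placeholders; numpy is already used by the test scripts):

    import numpy as np
    # frame holds width * height * 3 bytes of bottom-up (RGB24INV) pixel data
    rows = np.frombuffer(frame, dtype=np.uint8).reshape((height, width, 3))
    flipped = rows[::-1].tostring()           # same bytes with the rows reversed, i.e. RGB24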
width, int height, + unsigned long tv_sec, + unsigned long tv_usec) { pthread_mutex_lock(&this->lock); if(verbose) printf("SendFrame %i %s %i %i\n", imgLen, pxFmt, width, height); @@ -306,7 +314,8 @@ void Video_out::SendFrame(const char *imgIn, unsigned imgLen, const char *pxFmt, sendFrameArgsTmp.imgLen = imgLen; sendFrameArgsTmp.pxFmt = pxFmt; sendFrameArgsTmp.width = width; - sendFrameArgsTmp.height = height; + sendFrameArgsTmp.tv_sec = tv_sec; + sendFrameArgsTmp.tv_usec = tv_usec; this->sendFrameArgs.push_back(sendFrameArgsTmp); pthread_mutex_unlock(&this->lock); diff --git a/v4l2out.h b/v4l2out.h index 268fb4b..b504132 100644 --- a/v4l2out.h +++ b/v4l2out.h @@ -16,7 +16,7 @@ class Video_out : public Base_Video_Out int verbose; std::vector sendFrameArgs; std::vector sendFrameBuffer; - struct timespec lastFrameTime; + unsigned long lastFrameTime_sec, lastFrameTime_usec; int fdwr; int framesize; unsigned char *currentFrame; From ac116b7865a067c0f3fe297d67bcda8aeb877401 Mon Sep 17 00:00:00 2001 From: Tim Sheerman-Chase Date: Wed, 1 Jan 2014 21:46:48 +0000 Subject: [PATCH 230/256] Use external clock with linux --- v4l2capture.cpp | 3 +++ 1 file changed, 3 insertions(+) diff --git a/v4l2capture.cpp b/v4l2capture.cpp index f05159c..02b5cb2 100644 --- a/v4l2capture.cpp +++ b/v4l2capture.cpp @@ -57,7 +57,10 @@ int my_ioctl(int fd, int request, void *arg, int utimeout = -1) int r = select(fd+1, &fds, NULL, NULL, &tv); if(r == 0) + { + printf("Time out\n"); return 1; //Timed out + } } //printf("call\n"); From 878baff84f5c769f5eb22db6351b06d96b7adc51 Mon Sep 17 00:00:00 2001 From: Tim Sheerman-Chase Date: Wed, 1 Jan 2014 22:50:21 +0000 Subject: [PATCH 231/256] Attempting to fix lingering linux mjpeg decode bug --- libvideolive.cpp | 83 ++++++++++++++++++++++++++++++++++++++++++++++-- pixfmt.cpp | 17 ++++++++-- v4l2capture.cpp | 13 ++++++-- 3 files changed, 105 insertions(+), 8 deletions(-) diff --git a/libvideolive.cpp b/libvideolive.cpp index dd49417..4bf7526 100644 --- a/libvideolive.cpp +++ b/libvideolive.cpp @@ -18,6 +18,7 @@ #include "videoout.h" #include "videoin.h" #include "videooutfile.h" +#include "pixfmt.h" // ********************************************************************* @@ -31,7 +32,7 @@ PyObject *InsertHuffmanTable(PyObject *self, PyObject *args) if(PyTuple_Size(args) < 1) { PyErr_SetString(PyExc_TypeError, "Function requires 1 argument"); - Py_RETURN_NONE; + return NULL; } PyObject *inBuffer = PyTuple_GetItem(args, 0); @@ -43,7 +44,7 @@ PyObject *InsertHuffmanTable(PyObject *self, PyObject *args) //PyObject_Print(type, stdout, Py_PRINT_RAW); //Py_CLEAR(type); - Py_RETURN_NONE; + return NULL; } unsigned char* inBufferPtr = (unsigned char*)PyString_AsString(inBuffer); @@ -57,6 +58,83 @@ PyObject *InsertHuffmanTable(PyObject *self, PyObject *args) return outBufferPy; } +PyObject *DecodeAndResizeFrame(PyObject *self, PyObject *args) +{ + //0 string src pixFormat + //1 int src width + //2 int src height + //3 ByteArray src data + //4 string out pixFormat + //5 int out width + //6 int out height + //7 ByteArray out data + + if(PyTuple_Size(args) < 8) + { + PyErr_SetString(PyExc_TypeError, "Function requires 8 arguments"); + return NULL; + } + + //Input image + PyObject *inPixFmt = PyTuple_GetItem(args, 0); + if(!PyString_Check(inPixFmt)) {PyErr_SetString(PyExc_TypeError, "Argument 1 must be a string."); Py_RETURN_NONE;} + PyObject *inWidth = PyTuple_GetItem(args, 1); + if(!PyInt_Check(inWidth)) {PyErr_SetString(PyExc_TypeError, "Argument 2 must be an int."); Py_RETURN_NONE;} + 
PyObject *inHeight = PyTuple_GetItem(args, 2); + if(!PyInt_Check(inHeight)) {PyErr_SetString(PyExc_TypeError, "Argument 3 must be an int."); Py_RETURN_NONE;} + PyObject *inData = PyTuple_GetItem(args, 3); + if(!PyByteArray_Check(inData)) {PyErr_SetString(PyExc_TypeError, "Argument 4 must be a byte array."); Py_RETURN_NONE;} + + //Output image + PyObject *outPixFmt = PyTuple_GetItem(args, 4); + if(!PyString_Check(outPixFmt)) {PyErr_SetString(PyExc_TypeError, "Argument 5 must be a string."); Py_RETURN_NONE;} + PyObject *outWidth = PyTuple_GetItem(args, 5); + if(!PyInt_Check(outWidth)) {PyErr_SetString(PyExc_TypeError, "Argument 6 must be an int."); Py_RETURN_NONE;} + PyObject *outHeight = PyTuple_GetItem(args, 6); + if(!PyInt_Check(outHeight)) {PyErr_SetString(PyExc_TypeError, "Argument 7 must be an int."); Py_RETURN_NONE;} + PyObject *outData = PyTuple_GetItem(args, 7); + if(!PyByteArray_Check(outData)) {PyErr_SetString(PyExc_TypeError, "Argument 8 must be a byte array."); Py_RETURN_NONE;} + + unsigned char *buffOut = NULL; + unsigned buffOutLen = 0; + int useExistingBuff = 0; + if(PyByteArray_Size(outData) > 0) + { + buffOut = (unsigned char *)PyString_AsString(outData); + buffOutLen = PyByteArray_Size(outData); + useExistingBuff = 1; + } + + int ret = 0; + try + { + ret = DecodeAndResizeFrame((unsigned char*)PyString_AsString(inData), + PyString_Size(inData), + PyString_AsString(inPixFmt), + PyInt_AsLong(inWidth), PyInt_AsLong(inHeight), + PyString_AsString(outPixFmt), + &buffOut, + &buffOutLen, + PyInt_AsLong(outWidth), + PyInt_AsLong(outHeight)); + } + catch(std::exception &err) + { + PyErr_SetString(PyExc_RuntimeError, err.what()); + return NULL; + } + + if(!useExistingBuff && ret > 0) + { + PyByteArray_Resize(outData, buffOutLen); + memcpy(PyString_AsString(outData), buffOut, buffOutLen); + delete [] buffOut; + } + + return PyInt_FromLong(ret); +} + + // ********************************************************************* static PyMethodDef Device_manager_methods[] = { @@ -158,6 +236,7 @@ static PyTypeObject Video_out_file_manager_type = { static PyMethodDef module_methods[] = { { "InsertHuffmanTable", (PyCFunction)InsertHuffmanTable, METH_VARARGS, NULL }, + { "DecodeAndResizeFrame", (PyCFunction)DecodeAndResizeFrame, METH_VARARGS, NULL }, { NULL, NULL, 0, NULL } }; diff --git a/pixfmt.cpp b/pixfmt.cpp index f78672c..9e8b72f 100644 --- a/pixfmt.cpp +++ b/pixfmt.cpp @@ -153,6 +153,7 @@ int InsertHuffmanTableCTypes(const unsigned char* inBufferPtr, unsigned inBuffer int parsing = 1; unsigned frameStartPos = 0; int huffFound = 0; + int huffAdded = 0; outBuffer.clear(); @@ -177,13 +178,14 @@ int InsertHuffmanTableCTypes(const unsigned char* inBufferPtr, unsigned inBuffer //Stop if there is a serious error if(!ok) { - return 0; + return -1; } //Check if this segment is the compressed data if(twoBytes[0] == 0xff && twoBytes[1] == 0xda && !huffFound) { outBuffer.append(huffmanSegment, HUFFMAN_SEGMENT_LEN); + huffAdded = 1; } //Check the type of frame @@ -196,7 +198,8 @@ int InsertHuffmanTableCTypes(const unsigned char* inBufferPtr, unsigned inBuffer //Move cursor frameStartPos = frameEndPos; } - return 1; + if(huffAdded) return 1; + return 0; } // ********************************************************************* @@ -600,12 +603,20 @@ int DecodeFrame(const unsigned char *data, unsigned dataLen, if(strcmp(inPxFmt,"MJPEG")==0 && strcmp(targetPxFmt, "RGB24")==0) { std::string jpegBin; - InsertHuffmanTableCTypes(data, dataLen, jpegBin); + int huffRet = InsertHuffmanTableCTypes(data, dataLen, 
jpegBin); unsigned char *decodedBuff = NULL; unsigned decodedBuffSize = 0; int widthActual = 0, heightActual = 0, channelsActual = 0; + if(0) + { + //Save bin data to file for debug + FILE *jpegout = fopen("debug.jpg","wb"); + fwrite(jpegBin.c_str(), 1, jpegBin.length(), jpegout); + fclose(jpegout); + } + int jpegOk = ReadJpegFile((unsigned char*)jpegBin.c_str(), jpegBin.length(), &decodedBuff, &decodedBuffSize, diff --git a/v4l2capture.cpp b/v4l2capture.cpp index 02b5cb2..ca342ba 100644 --- a/v4l2capture.cpp +++ b/v4l2capture.cpp @@ -52,8 +52,8 @@ int my_ioctl(int fd, int request, void *arg, int utimeout = -1) FD_SET (fd, &fds); struct timeval tv; - tv.tv_sec = 0; - tv.tv_usec = utimeout; + tv.tv_sec = 0; + tv.tv_usec = utimeout; int r = select(fd+1, &fds, NULL, NULL, &tv); if(r == 0) @@ -61,11 +61,15 @@ int my_ioctl(int fd, int request, void *arg, int utimeout = -1) printf("Time out\n"); return 1; //Timed out } + else + printf("r %d %d\n", request, r); } //printf("call\n"); + if(request==VIDIOC_DQBUF) printf("VIDIOC_DQBUF\n"); + printf("VIDIOC_DQBUF = %d\n", VIDIOC_DQBUF); int result = v4l2_ioctl(fd, request, arg); - printf("v4l2_ioctl %d\n", result); + printf("v4l2_ioctl %d %d\n", request, result); if(!result) { @@ -255,13 +259,16 @@ int Video_in_Manager::ReadFrame() buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; buffer.memory = V4L2_MEMORY_MMAP; + printf("Start VIDIOC_DQBUF\n"); if(my_ioctl(this->fd, VIDIOC_DQBUF, &buffer, 10000)) { return 0; } + printf("rx %d\n", buffer.bytesused); unsigned char *rgbBuff = NULL; unsigned rgbBuffLen = 0; + printf("buff index %d\n", buffer.index); int ok = DecodeFrame((const unsigned char*)this->buffers[buffer.index].start, buffer.bytesused, this->pxFmt.c_str(), this->frameWidth, From ab3d93b631d40e40ce9ebc9e93d24e4125c2635a Mon Sep 17 00:00:00 2001 From: Tim Sheerman-Chase Date: Wed, 1 Jan 2014 22:57:45 +0000 Subject: [PATCH 232/256] Decoding fails in interactive mode as expected --- libvideolive.cpp | 2 +- pixfmt.cpp | 9 +++++++++ 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/libvideolive.cpp b/libvideolive.cpp index 4bf7526..d5b791e 100644 --- a/libvideolive.cpp +++ b/libvideolive.cpp @@ -108,7 +108,7 @@ PyObject *DecodeAndResizeFrame(PyObject *self, PyObject *args) int ret = 0; try { - ret = DecodeAndResizeFrame((unsigned char*)PyString_AsString(inData), + ret = DecodeAndResizeFrame((unsigned char*)PyByteArray_AsString(inData), PyString_Size(inData), PyString_AsString(inPixFmt), PyInt_AsLong(inWidth), PyInt_AsLong(inHeight), diff --git a/pixfmt.cpp b/pixfmt.cpp index 9e8b72f..fa51a65 100644 --- a/pixfmt.cpp +++ b/pixfmt.cpp @@ -54,6 +54,9 @@ int ReadJpegFrame(const unsigned char *data, unsigned offset, const unsigned cha //Based on http://www.gdcl.co.uk/2013/05/02/Motion-JPEG.html //and https://en.wikipedia.org/wiki/JPEG + if(data == NULL) + throw std::runtime_error("Input data is null pointer"); + *twoBytesOut = NULL; *frameStartPosOut = 0; *cursorOut = 0; @@ -150,6 +153,9 @@ int ReadJpegFrame(const unsigned char *data, unsigned offset, const unsigned cha int InsertHuffmanTableCTypes(const unsigned char* inBufferPtr, unsigned inBufferLen, std::string &outBuffer) { + if(inBufferPtr == NULL) + throw std::runtime_error("Input data is null pointer"); + int parsing = 1; unsigned frameStartPos = 0; int huffFound = 0; @@ -233,6 +239,9 @@ int ReadJpegFile(unsigned char * inbuffer, * working space (which is allocated as needed by the JPEG library). 
*/ + if(inbuffer == NULL) + throw std::runtime_error("Input data is null pointer"); + if(inbuffer[0] != 0xFF || inbuffer[1] != 0xD8) return 0; From 160bde5152cfc67eab48964bd9c51921fe068263 Mon Sep 17 00:00:00 2001 From: Tim Sheerman-Chase Date: Wed, 1 Jan 2014 23:31:28 +0000 Subject: [PATCH 233/256] Initialise mem --- pixfmt.cpp | 2 ++ 1 file changed, 2 insertions(+) diff --git a/pixfmt.cpp b/pixfmt.cpp index fa51a65..191f164 100644 --- a/pixfmt.cpp +++ b/pixfmt.cpp @@ -246,7 +246,9 @@ int ReadJpegFile(unsigned char * inbuffer, return 0; struct jpeg_decompress_struct cinfo; + memset(&cinfo, 0x00, sizeof(struct jpeg_decompress_struct)); struct my_error_mgr jerr; + memset(&jerr, 0x00, sizeof(struct my_error_mgr)); *outBuffer = NULL; *outBufferSize = 0; *widthOut = 0; From 9ae1fa7e54f949e08adee11286b245d05d12c3dd Mon Sep 17 00:00:00 2001 From: Tim Sheerman-Chase Date: Thu, 2 Jan 2014 00:39:55 +0000 Subject: [PATCH 234/256] Fix memory size --- pixfmt.cpp | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/pixfmt.cpp b/pixfmt.cpp index 191f164..c1891c4 100644 --- a/pixfmt.cpp +++ b/pixfmt.cpp @@ -249,8 +249,6 @@ int ReadJpegFile(unsigned char * inbuffer, memset(&cinfo, 0x00, sizeof(struct jpeg_decompress_struct)); struct my_error_mgr jerr; memset(&jerr, 0x00, sizeof(struct my_error_mgr)); - *outBuffer = NULL; - *outBufferSize = 0; *widthOut = 0; *heightOut = 0; *channelsOut = 0; @@ -282,11 +280,9 @@ int ReadJpegFile(unsigned char * inbuffer, unsigned int outBuffLen = cinfo.image_width * cinfo.image_height * cinfo.num_components; if(*outBufferSize != 0 && *outBufferSize != outBuffLen) throw std::runtime_error("Output buffer has incorrect size"); + *outBufferSize = outBuffLen; if(*outBuffer == NULL) - { *outBuffer = new unsigned char[*outBufferSize]; - } - *outBufferSize = outBuffLen; *widthOut = cinfo.image_width; *heightOut = cinfo.image_height; *channelsOut = cinfo.num_components; From a800ecc829dbd10a4eff784ba82e0b5276e92e38 Mon Sep 17 00:00:00 2001 From: Tim Sheerman-Chase Date: Thu, 2 Jan 2014 00:43:18 +0000 Subject: [PATCH 235/256] Fixed type of var --- libvideolive.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/libvideolive.cpp b/libvideolive.cpp index d5b791e..3bbb549 100644 --- a/libvideolive.cpp +++ b/libvideolive.cpp @@ -127,7 +127,7 @@ PyObject *DecodeAndResizeFrame(PyObject *self, PyObject *args) if(!useExistingBuff && ret > 0) { PyByteArray_Resize(outData, buffOutLen); - memcpy(PyString_AsString(outData), buffOut, buffOutLen); + memcpy(PyByteArray_AsString(outData), buffOut, buffOutLen); delete [] buffOut; } From d895471eb819af259e466eae079480af9ef66347 Mon Sep 17 00:00:00 2001 From: Tim Sheerman-Chase Date: Thu, 2 Jan 2014 00:48:20 +0000 Subject: [PATCH 236/256] Reuse buffer in decoding --- pixfmt.cpp | 16 ++++------------ 1 file changed, 4 insertions(+), 12 deletions(-) diff --git a/pixfmt.cpp b/pixfmt.cpp index c1891c4..06cc24e 100644 --- a/pixfmt.cpp +++ b/pixfmt.cpp @@ -254,7 +254,6 @@ int ReadJpegFile(unsigned char * inbuffer, *channelsOut = 0; /* More stuff */ - JSAMPARRAY buffer; /* Output row buffer */ int row_stride; /* physical row width in output buffer */ /* Step 1: initialize the JPEG decompression object. 
*/ @@ -294,9 +293,6 @@ int ReadJpegFile(unsigned char * inbuffer, jpeg_start_decompress(&cinfo); /* JSAMPLEs per row in output buffer */ row_stride = cinfo.output_width * cinfo.output_components; - /* Make a one-row-high sample array that will go away when done with image */ - buffer = (*cinfo.mem->alloc_sarray) - ((j_common_ptr) &cinfo, JPOOL_IMAGE, row_stride, 1); /* Step 6: while (scan lines remain to be read) */ /* jpeg_read_scanlines(...); */ @@ -309,15 +305,11 @@ int ReadJpegFile(unsigned char * inbuffer, * Here the array is only one element long, but you could ask for * more than one scanline at a time if that's more convenient. */ - jpeg_read_scanlines(&cinfo, buffer, 1); - /* Assume put_scanline_someplace wants a pointer and sample count. */ - //put_scanline_someplace(buffer[0], row_stride); + unsigned char *buffer_array[1]; + buffer_array[0] = *outBuffer + cinfo.output_scanline * row_stride; + jpeg_read_scanlines(&cinfo, buffer_array, 1); + assert(row_stride = cinfo.image_width * cinfo.num_components); - //printf("%ld\n", (long)buffer); - //printf("%ld\n", (long)buffer[0]); - //printf("%d %d\n", (cinfo.output_scanline-1) * row_stride, *outBufferSize); - //printf("%ld %ld\n", (long)outBuffer, (long)&outBuffer[(cinfo.output_scanline-1) * row_stride]); - memcpy(&(*outBuffer)[(cinfo.output_scanline-1) * row_stride], buffer[0], row_stride); } /* Step 7: Finish decompression */ From 11f1c25ca2d0943a3a2213f2869660fda3eefe8c Mon Sep 17 00:00:00 2001 From: Tim Sheerman-Chase Date: Thu, 2 Jan 2014 00:49:44 +0000 Subject: [PATCH 237/256] Remove debug code --- v4l2capture.cpp | 9 --------- 1 file changed, 9 deletions(-) diff --git a/v4l2capture.cpp b/v4l2capture.cpp index ca342ba..d2539c9 100644 --- a/v4l2capture.cpp +++ b/v4l2capture.cpp @@ -58,18 +58,12 @@ int my_ioctl(int fd, int request, void *arg, int utimeout = -1) if(r == 0) { - printf("Time out\n"); return 1; //Timed out } - else - printf("r %d %d\n", request, r); } //printf("call\n"); - if(request==VIDIOC_DQBUF) printf("VIDIOC_DQBUF\n"); - printf("VIDIOC_DQBUF = %d\n", VIDIOC_DQBUF); int result = v4l2_ioctl(fd, request, arg); - printf("v4l2_ioctl %d %d\n", request, result); if(!result) { @@ -259,16 +253,13 @@ int Video_in_Manager::ReadFrame() buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; buffer.memory = V4L2_MEMORY_MMAP; - printf("Start VIDIOC_DQBUF\n"); if(my_ioctl(this->fd, VIDIOC_DQBUF, &buffer, 10000)) { return 0; } - printf("rx %d\n", buffer.bytesused); unsigned char *rgbBuff = NULL; unsigned rgbBuffLen = 0; - printf("buff index %d\n", buffer.index); int ok = DecodeFrame((const unsigned char*)this->buffers[buffer.index].start, buffer.bytesused, this->pxFmt.c_str(), this->frameWidth, From 9d473f5452da18339a556e8cfebf9c62e6da905f Mon Sep 17 00:00:00 2001 From: Tim Sheerman-Chase Date: Sat, 4 Jan 2014 13:03:51 +0000 Subject: [PATCH 238/256] Add warning for file writer not implemented on posix --- videooutfile.cpp | 2 ++ 1 file changed, 2 insertions(+) diff --git a/videooutfile.cpp b/videooutfile.cpp index 8e787c9..b694744 100644 --- a/videooutfile.cpp +++ b/videooutfile.cpp @@ -52,6 +52,8 @@ PyObject *Video_out_file_manager_open(Video_out_file_manager *self, PyObject *ar pthread_t thread; #ifdef _POSIX //TODO + PyErr_SetString(PyExc_RuntimeError, "Not implemented"); + return NULL; #endif #ifdef _NT MfVideoOutFile *threadArgs = NULL; From 4059379a7b82d88a3b5532fe3f851501feac124a Mon Sep 17 00:00:00 2001 From: Tim Sheerman-Chase Date: Sat, 4 Jan 2014 13:05:51 +0000 Subject: [PATCH 239/256] Remove debug code --- videooutfile.cpp | 3 +-- 
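The MJPEG decode path patched above leans on the Huffman-table insertion that is also exposed to Python as the module-level InsertHuffmanTable helper: it takes one MJPEG frame as a string and returns a copy with a standard Huffman (DHT) segment inserted before the scan data, which makes frames from webcams that omit the tables readable by ordinary JPEG decoders. A small sketch, assuming the extension module imports as libvideolive and using illustrative file names:

    import libvideolive
    mjpegFrame = open("webcam_frame.mjpg", "rb").read()      # raw MJPEG frame without DHT tables
    fixedJpeg = libvideolive.InsertHuffmanTable(mjpegFrame)   # returns a plain JPEG string
    open("webcam_frame.jpg", "wb").write(fixedJpeg)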
1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/videooutfile.cpp b/videooutfile.cpp index b694744..e23f708 100644 --- a/videooutfile.cpp +++ b/videooutfile.cpp @@ -34,7 +34,7 @@ void Video_out_file_manager_dealloc(Video_out_file_manager *self) PyObject *Video_out_file_manager_open(Video_out_file_manager *self, PyObject *args) { - std::cout << "Video_out_file_manager_open" << std::endl; + //std::cout << "Video_out_file_manager_open" << std::endl; //Process arguments const char *devarg = NULL; @@ -43,7 +43,6 @@ PyObject *Video_out_file_manager_open(Video_out_file_manager *self, PyObject *ar if(!PyArg_ParseTuple(args, "sii", &devarg, &widthIn, &heightIn)) { - std::cout << "Attempt to throw exception" << std::endl; PyErr_SetString(PyExc_RuntimeError, "Incorrect arguments to function."); return NULL; } From da4ae772619a7a8753eff1f9ab44838745a002ee Mon Sep 17 00:00:00 2001 From: Tim Sheerman-Chase Date: Thu, 9 Jan 2014 14:03:21 +0000 Subject: [PATCH 240/256] Attempting to decode jpeg of unknown size --- libvideolive.cpp | 16 ++++---- pixfmt.cpp | 101 +++++++++++++++++++++++++++++++++++++---------- pixfmt.h | 2 +- videoout.cpp | 2 +- videooutfile.cpp | 2 +- 5 files changed, 92 insertions(+), 31 deletions(-) diff --git a/libvideolive.cpp b/libvideolive.cpp index 3bbb549..5f29ebe 100644 --- a/libvideolive.cpp +++ b/libvideolive.cpp @@ -77,23 +77,23 @@ PyObject *DecodeAndResizeFrame(PyObject *self, PyObject *args) //Input image PyObject *inPixFmt = PyTuple_GetItem(args, 0); - if(!PyString_Check(inPixFmt)) {PyErr_SetString(PyExc_TypeError, "Argument 1 must be a string."); Py_RETURN_NONE;} + if(!PyString_Check(inPixFmt)) {PyErr_SetString(PyExc_TypeError, "Argument 1 must be a string."); return NULL;} PyObject *inWidth = PyTuple_GetItem(args, 1); - if(!PyInt_Check(inWidth)) {PyErr_SetString(PyExc_TypeError, "Argument 2 must be an int."); Py_RETURN_NONE;} + if(!PyInt_Check(inWidth)) {PyErr_SetString(PyExc_TypeError, "Argument 2 must be an int."); return NULL;} PyObject *inHeight = PyTuple_GetItem(args, 2); - if(!PyInt_Check(inHeight)) {PyErr_SetString(PyExc_TypeError, "Argument 3 must be an int."); Py_RETURN_NONE;} + if(!PyInt_Check(inHeight)) {PyErr_SetString(PyExc_TypeError, "Argument 3 must be an int."); return NULL;} PyObject *inData = PyTuple_GetItem(args, 3); - if(!PyByteArray_Check(inData)) {PyErr_SetString(PyExc_TypeError, "Argument 4 must be a byte array."); Py_RETURN_NONE;} + if(!PyByteArray_Check(inData)) {PyErr_SetString(PyExc_TypeError, "Argument 4 must be a byte array."); return NULL;} //Output image PyObject *outPixFmt = PyTuple_GetItem(args, 4); - if(!PyString_Check(outPixFmt)) {PyErr_SetString(PyExc_TypeError, "Argument 5 must be a string."); Py_RETURN_NONE;} + if(!PyString_Check(outPixFmt)) {PyErr_SetString(PyExc_TypeError, "Argument 5 must be a string."); return NULL;} PyObject *outWidth = PyTuple_GetItem(args, 5); - if(!PyInt_Check(outWidth)) {PyErr_SetString(PyExc_TypeError, "Argument 6 must be an int."); Py_RETURN_NONE;} + if(!PyInt_Check(outWidth)) {PyErr_SetString(PyExc_TypeError, "Argument 6 must be an int."); return NULL;} PyObject *outHeight = PyTuple_GetItem(args, 6); - if(!PyInt_Check(outHeight)) {PyErr_SetString(PyExc_TypeError, "Argument 7 must be an int."); Py_RETURN_NONE;} + if(!PyInt_Check(outHeight)) {PyErr_SetString(PyExc_TypeError, "Argument 7 must be an int."); return NULL;} PyObject *outData = PyTuple_GetItem(args, 7); - if(!PyByteArray_Check(outData)) {PyErr_SetString(PyExc_TypeError, "Argument 8 must be a byte array."); Py_RETURN_NONE;} + 
if(!PyByteArray_Check(outData)) {PyErr_SetString(PyExc_TypeError, "Argument 8 must be a byte array."); return NULL;} unsigned char *buffOut = NULL; unsigned buffOutLen = 0; diff --git a/pixfmt.cpp b/pixfmt.cpp index 06cc24e..d6fb4ef 100644 --- a/pixfmt.cpp +++ b/pixfmt.cpp @@ -576,7 +576,7 @@ int ConvertYUYVtoRGB(const unsigned char *im, unsigned dataLen, int DecodeFrame(const unsigned char *data, unsigned dataLen, const char *inPxFmt, - int width, int height, + int &width, int &height, const char *targetPxFmt, unsigned char **buffOut, unsigned *buffOutLen) @@ -624,11 +624,13 @@ int DecodeFrame(const unsigned char *data, unsigned dataLen, if (!jpegOk) throw std::runtime_error("Error decoding jpeg"); - if(widthActual == width && heightActual == height) + if((widthActual == width && heightActual == height) || width == 0 || height == 0) { assert(channelsActual == 3); *buffOut = decodedBuff; *buffOutLen = decodedBuffSize; + width = widthActual; + height = heightActual; } else { @@ -952,26 +954,65 @@ int DecodeAndResizeFrame(const unsigned char *data, int dstWidth, int dstHeight) { - if(srcWidth==dstWidth && srcHeight==dstHeight) + const unsigned char *currentImg = data; + int decallocateWhenDone = 0; + unsigned currentLen = dataLen; + std::string currentPxFmt = inPxFmt; + int currentWidth = srcWidth; + int currentHeight = srcHeight; + unsigned char *tmpBuff = NULL; + unsigned tmpBuffLen = 0; + + std::cout << "a" << std::endl; + if(currentWidth==0 || currentHeight==0) + { + //Source has unknown dimensions + int ret = DecodeFrame(currentImg, currentLen, + currentPxFmt.c_str(), + currentWidth, currentHeight, + targetPxFmt, + &tmpBuff, + &tmpBuffLen); + + //Free intermediate buff + //probably not needed at this stage but good consistency + if(decallocateWhenDone && currentImg != NULL) + { + delete [] currentImg; + currentImg = NULL; + currentLen = 0; + } + + currentImg = tmpBuff; + currentLen = tmpBuffLen; + currentPxFmt = targetPxFmt; + decallocateWhenDone = 1; + } + + std::cout << "b" << std::endl; + + if((currentWidth==dstWidth && currentHeight==dstHeight) || dstWidth == 0 || dstHeight == 0) { //Resize is not required - int ret = DecodeFrame(data, dataLen, - inPxFmt, - srcWidth, srcHeight, + int ret = DecodeFrame(currentImg, currentLen, + currentPxFmt.c_str(), + currentWidth, currentHeight, targetPxFmt, buffOut, buffOutLen); + + //Free intermediate buff + if(decallocateWhenDone && currentImg != NULL) + { + delete [] currentImg; + currentImg = NULL; + currentLen = 0; + } + + std::cout << "c" << std::endl; return ret; } - const unsigned char *currentImg = data; - unsigned currentLen = dataLen; - std::string currentPxFmt = inPxFmt; - int currentWidth = srcWidth; - int currentHeight = srcHeight; - - unsigned char *tmpBuff = NULL; - unsigned tmpBuffLen = 0; int resizeRet = ResizeFrame(currentImg, currentLen, currentPxFmt.c_str(), @@ -983,8 +1024,17 @@ int DecodeAndResizeFrame(const unsigned char *data, if(resizeRet > 0) { + //Free intermediate buff + if(decallocateWhenDone && currentImg != NULL) + { + delete [] currentImg; + currentImg = NULL; + currentLen = 0; + } + //Resize succeeded currentImg = tmpBuff; + decallocateWhenDone = 1; currentLen = tmpBuffLen; currentWidth = dstWidth; currentHeight = dstHeight; @@ -997,10 +1047,11 @@ int DecodeAndResizeFrame(const unsigned char *data, buffOutLen); //Free intermediate buff - if(tmpBuff != NULL) + if(decallocateWhenDone && currentImg != NULL) { - delete [] tmpBuff; - tmpBuff = NULL; + delete [] currentImg; + currentImg = NULL; + currentLen = 0; } return 
decodeRet; @@ -1017,7 +1068,16 @@ int DecodeAndResizeFrame(const unsigned char *data, &tmpBuffLen); if(decodeRet <= 0) + { + //Free intermediate buff + if(decallocateWhenDone && currentImg != NULL) + { + delete [] currentImg; + currentImg = NULL; + currentLen = 0; + } return 0; //Conversion failed + } //Now resize resizeRet = ResizeFrame(tmpBuff, @@ -1030,10 +1090,11 @@ int DecodeAndResizeFrame(const unsigned char *data, dstHeight); //Free intermediate buff - if(tmpBuff != NULL) + if(decallocateWhenDone && currentImg != NULL) { - delete [] tmpBuff; - tmpBuff = NULL; + delete [] currentImg; + currentImg = NULL; + currentLen = 0; } return resizeRet; diff --git a/pixfmt.h b/pixfmt.h index 4930341..c7138cd 100644 --- a/pixfmt.h +++ b/pixfmt.h @@ -6,7 +6,7 @@ int DecodeFrame(const unsigned char *data, unsigned dataLen, const char *inPxFmt, - int width, int height, + int &width, int &height, const char *targetPxFmt, unsigned char **buffOut, unsigned *buffOutLen); diff --git a/videoout.cpp b/videoout.cpp index 573fce0..12f5ab5 100644 --- a/videoout.cpp +++ b/videoout.cpp @@ -99,7 +99,7 @@ PyObject *Video_out_manager_Send_frame(Video_out_manager *self, PyObject *args) Py_ssize_t imgLen = PyObject_Length(pyimg); if(imgIn == NULL) - PyErr_SetString(PyExc_RuntimeError, "Argument 2 must be a string or bytearray."); + {PyErr_SetString(PyExc_RuntimeError, "Argument 2 must be a string or bytearray."); return NULL;} PyObject *pyPxFmt = PyTuple_GetItem(args, 2); pxFmtIn = PyString_AsString(pyPxFmt); diff --git a/videooutfile.cpp b/videooutfile.cpp index e23f708..e5d1437 100644 --- a/videooutfile.cpp +++ b/videooutfile.cpp @@ -119,7 +119,7 @@ PyObject *Video_out_file_manager_Send_frame(Video_out_file_manager *self, PyObje Py_ssize_t imgLen = PyObject_Length(pyimg); if(imgIn == NULL) - PyErr_SetString(PyExc_RuntimeError, "Argument 2 must be a string or byte array."); + {PyErr_SetString(PyExc_RuntimeError, "Argument 2 must be a string or byte array.");return NULL;} PyObject *pyPxFmt = PyTuple_GetItem(args, 2); pxFmtIn = PyString_AsString(pyPxFmt); From d3bd8bdbeb23fd8e82ff4088795db444d84465c7 Mon Sep 17 00:00:00 2001 From: Tim Sheerman-Chase Date: Thu, 9 Jan 2014 14:25:50 +0000 Subject: [PATCH 241/256] Cant find cause of exception err --- libvideolive.cpp | 25 ++++++++++++++++--------- pixfmt.cpp | 7 +++++-- pixfmt.h | 4 ++-- 3 files changed, 23 insertions(+), 13 deletions(-) diff --git a/libvideolive.cpp b/libvideolive.cpp index 5f29ebe..943c5b5 100644 --- a/libvideolive.cpp +++ b/libvideolive.cpp @@ -13,6 +13,7 @@ #include #include #include +#include #include #include "pixfmt.h" #include "videoout.h" @@ -60,6 +61,7 @@ PyObject *InsertHuffmanTable(PyObject *self, PyObject *args) PyObject *DecodeAndResizeFrame(PyObject *self, PyObject *args) { + std::cout << "DecodeAndResizeFrame start" << std::endl; //0 string src pixFormat //1 int src width //2 int src height @@ -108,15 +110,18 @@ PyObject *DecodeAndResizeFrame(PyObject *self, PyObject *args) int ret = 0; try { - ret = DecodeAndResizeFrame((unsigned char*)PyByteArray_AsString(inData), - PyString_Size(inData), - PyString_AsString(inPixFmt), - PyInt_AsLong(inWidth), PyInt_AsLong(inHeight), - PyString_AsString(outPixFmt), - &buffOut, - &buffOutLen, - PyInt_AsLong(outWidth), - PyInt_AsLong(outHeight)); + int outWidthInt = PyInt_AsLong(outWidth); + int outHeightInt = PyInt_AsLong(outHeight); + + ret = DecodeAndResizeFrame((unsigned char*)PyByteArray_AsString(inData), + PyString_Size(inData), + PyString_AsString(inPixFmt), + PyInt_AsLong(inWidth), 
PyInt_AsLong(inHeight), + PyString_AsString(outPixFmt), + &buffOut, + &buffOutLen, + outWidthInt, + outHeightInt); } catch(std::exception &err) { @@ -131,6 +136,8 @@ PyObject *DecodeAndResizeFrame(PyObject *self, PyObject *args) delete [] buffOut; } + std::cout << "DecodeAndResizeFrame end" << std::endl; + return PyInt_FromLong(ret); } diff --git a/pixfmt.cpp b/pixfmt.cpp index d6fb4ef..9cdc331 100644 --- a/pixfmt.cpp +++ b/pixfmt.cpp @@ -951,8 +951,8 @@ int DecodeAndResizeFrame(const unsigned char *data, const char *targetPxFmt, unsigned char **buffOut, unsigned *buffOutLen, - int dstWidth, - int dstHeight) + int &dstWidth, + int &dstHeight) { const unsigned char *currentImg = data; int decallocateWhenDone = 0; @@ -1009,6 +1009,9 @@ int DecodeAndResizeFrame(const unsigned char *data, currentLen = 0; } + dstWidth = currentWidth; + dstHeight = currentHeight; + std::cout << "c" << std::endl; return ret; } diff --git a/pixfmt.h b/pixfmt.h index c7138cd..e7275b3 100644 --- a/pixfmt.h +++ b/pixfmt.h @@ -18,8 +18,8 @@ int DecodeAndResizeFrame(const unsigned char *data, const char *targetPxFmt, unsigned char **buffOut, unsigned *buffOutLen, - int dstWidth, - int dstHeight); + int &dstWidth, + int &dstHeight); int ResizeFrame(const unsigned char *data, unsigned dataLen, From a5340eeee6268621ddd7a2b4c27c99251d4523b5 Mon Sep 17 00:00:00 2001 From: Tim Sheerman-Chase Date: Thu, 9 Jan 2014 14:40:38 +0000 Subject: [PATCH 242/256] PyErr is set in an unusual way --- libvideolive.cpp | 7 +++++-- pixfmt.cpp | 4 ---- 2 files changed, 5 insertions(+), 6 deletions(-) diff --git a/libvideolive.cpp b/libvideolive.cpp index 943c5b5..b8bcd1c 100644 --- a/libvideolive.cpp +++ b/libvideolive.cpp @@ -61,7 +61,9 @@ PyObject *InsertHuffmanTable(PyObject *self, PyObject *args) PyObject *DecodeAndResizeFrame(PyObject *self, PyObject *args) { - std::cout << "DecodeAndResizeFrame start" << std::endl; + if (PyErr_Occurred() != NULL) + throw std::runtime_error("Python error set with unexpected state."); + //0 string src pixFormat //1 int src width //2 int src height @@ -136,7 +138,8 @@ PyObject *DecodeAndResizeFrame(PyObject *self, PyObject *args) delete [] buffOut; } - std::cout << "DecodeAndResizeFrame end" << std::endl; + if (PyErr_Occurred() != NULL) + throw std::runtime_error("Python error set with unexpected state."); return PyInt_FromLong(ret); } diff --git a/pixfmt.cpp b/pixfmt.cpp index 9cdc331..fa24d3f 100644 --- a/pixfmt.cpp +++ b/pixfmt.cpp @@ -963,7 +963,6 @@ int DecodeAndResizeFrame(const unsigned char *data, unsigned char *tmpBuff = NULL; unsigned tmpBuffLen = 0; - std::cout << "a" << std::endl; if(currentWidth==0 || currentHeight==0) { //Source has unknown dimensions @@ -989,8 +988,6 @@ int DecodeAndResizeFrame(const unsigned char *data, decallocateWhenDone = 1; } - std::cout << "b" << std::endl; - if((currentWidth==dstWidth && currentHeight==dstHeight) || dstWidth == 0 || dstHeight == 0) { //Resize is not required @@ -1012,7 +1009,6 @@ int DecodeAndResizeFrame(const unsigned char *data, dstWidth = currentWidth; dstHeight = currentHeight; - std::cout << "c" << std::endl; return ret; } From 6eaa11a800aaaeccdd062e68ebbc8c4815587530 Mon Sep 17 00:00:00 2001 From: Tim Sheerman-Chase Date: Thu, 9 Jan 2014 14:41:28 +0000 Subject: [PATCH 243/256] PyErr is set in an unusual way --- libvideolive.cpp | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/libvideolive.cpp b/libvideolive.cpp index b8bcd1c..654b4e3 100644 --- a/libvideolive.cpp +++ b/libvideolive.cpp @@ -64,6 +64,8 @@ PyObject 
*DecodeAndResizeFrame(PyObject *self, PyObject *args) if (PyErr_Occurred() != NULL) throw std::runtime_error("Python error set with unexpected state."); + std::cout << "a" << (PyErr_Occurred() != NULL) << std::endl; + //0 string src pixFormat //1 int src width //2 int src height @@ -99,6 +101,8 @@ PyObject *DecodeAndResizeFrame(PyObject *self, PyObject *args) PyObject *outData = PyTuple_GetItem(args, 7); if(!PyByteArray_Check(outData)) {PyErr_SetString(PyExc_TypeError, "Argument 8 must be a byte array."); return NULL;} + std::cout << "b" << (PyErr_Occurred() != NULL) << std::endl; + unsigned char *buffOut = NULL; unsigned buffOutLen = 0; int useExistingBuff = 0; @@ -131,12 +135,16 @@ PyObject *DecodeAndResizeFrame(PyObject *self, PyObject *args) return NULL; } + std::cout << "c" << (PyErr_Occurred() != NULL) << std::endl; + if(!useExistingBuff && ret > 0) { PyByteArray_Resize(outData, buffOutLen); memcpy(PyByteArray_AsString(outData), buffOut, buffOutLen); delete [] buffOut; } + + std::cout << "d" << (PyErr_Occurred() != NULL) << std::endl; if (PyErr_Occurred() != NULL) throw std::runtime_error("Python error set with unexpected state."); From 09d660c0b1468797a4f9a96c254a5aea861e4fec Mon Sep 17 00:00:00 2001 From: Tim Sheerman-Chase Date: Thu, 9 Jan 2014 14:55:23 +0000 Subject: [PATCH 244/256] Using wrong function to get type of bytearray --- libvideolive.cpp | 26 ++++++++++++-------------- 1 file changed, 12 insertions(+), 14 deletions(-) diff --git a/libvideolive.cpp b/libvideolive.cpp index 654b4e3..72e0b2a 100644 --- a/libvideolive.cpp +++ b/libvideolive.cpp @@ -59,13 +59,11 @@ PyObject *InsertHuffmanTable(PyObject *self, PyObject *args) return outBufferPy; } -PyObject *DecodeAndResizeFrame(PyObject *self, PyObject *args) +PyObject *DecodeAndResizeFrameHighLevel(PyObject *self, PyObject *args) { if (PyErr_Occurred() != NULL) throw std::runtime_error("Python error set with unexpected state."); - std::cout << "a" << (PyErr_Occurred() != NULL) << std::endl; - //0 string src pixFormat //1 int src width //2 int src height @@ -101,8 +99,6 @@ PyObject *DecodeAndResizeFrame(PyObject *self, PyObject *args) PyObject *outData = PyTuple_GetItem(args, 7); if(!PyByteArray_Check(outData)) {PyErr_SetString(PyExc_TypeError, "Argument 8 must be a byte array."); return NULL;} - std::cout << "b" << (PyErr_Occurred() != NULL) << std::endl; - unsigned char *buffOut = NULL; unsigned buffOutLen = 0; int useExistingBuff = 0; @@ -119,15 +115,21 @@ PyObject *DecodeAndResizeFrame(PyObject *self, PyObject *args) int outWidthInt = PyInt_AsLong(outWidth); int outHeightInt = PyInt_AsLong(outHeight); - ret = DecodeAndResizeFrame((unsigned char*)PyByteArray_AsString(inData), - PyString_Size(inData), - PyString_AsString(inPixFmt), + unsigned char *inDataC = (unsigned char*)PyByteArray_AsString(inData); + long inDataLen = PyByteArray_Size(inData); + char *inPixFmtC = PyString_AsString(inPixFmt); + char *outPixFmtC = PyString_AsString(outPixFmt); + + ret = DecodeAndResizeFrame(inDataC, + inDataLen, + inPixFmtC, PyInt_AsLong(inWidth), PyInt_AsLong(inHeight), - PyString_AsString(outPixFmt), + outPixFmtC, &buffOut, &buffOutLen, outWidthInt, outHeightInt); + } catch(std::exception &err) { @@ -135,8 +137,6 @@ PyObject *DecodeAndResizeFrame(PyObject *self, PyObject *args) return NULL; } - std::cout << "c" << (PyErr_Occurred() != NULL) << std::endl; - if(!useExistingBuff && ret > 0) { PyByteArray_Resize(outData, buffOutLen); @@ -144,8 +144,6 @@ PyObject *DecodeAndResizeFrame(PyObject *self, PyObject *args) delete [] buffOut; } - 
std::cout << "d" << (PyErr_Occurred() != NULL) << std::endl; - if (PyErr_Occurred() != NULL) throw std::runtime_error("Python error set with unexpected state."); @@ -254,7 +252,7 @@ static PyTypeObject Video_out_file_manager_type = { static PyMethodDef module_methods[] = { { "InsertHuffmanTable", (PyCFunction)InsertHuffmanTable, METH_VARARGS, NULL }, - { "DecodeAndResizeFrame", (PyCFunction)DecodeAndResizeFrame, METH_VARARGS, NULL }, + { "DecodeAndResizeFrame", (PyCFunction)DecodeAndResizeFrameHighLevel, METH_VARARGS, NULL }, { NULL, NULL, 0, NULL } }; From 4b73c6a53267dc98a704a7509d9b52ec749b3e71 Mon Sep 17 00:00:00 2001 From: Tim Sheerman-Chase Date: Thu, 9 Jan 2014 15:04:33 +0000 Subject: [PATCH 245/256] Using wrong function to get type of bytearray --- libvideolive.cpp | 20 +++++++++++++++++++- 1 file changed, 19 insertions(+), 1 deletion(-) diff --git a/libvideolive.cpp b/libvideolive.cpp index 72e0b2a..1cf4a73 100644 --- a/libvideolive.cpp +++ b/libvideolive.cpp @@ -75,7 +75,7 @@ PyObject *DecodeAndResizeFrameHighLevel(PyObject *self, PyObject *args) if(PyTuple_Size(args) < 8) { - PyErr_SetString(PyExc_TypeError, "Function requires 8 arguments"); + PyErr_SetString(PyExc_TypeError, "Function requires 8 arguments (and 1 optional)"); return NULL; } @@ -99,6 +99,17 @@ PyObject *DecodeAndResizeFrameHighLevel(PyObject *self, PyObject *args) PyObject *outData = PyTuple_GetItem(args, 7); if(!PyByteArray_Check(outData)) {PyErr_SetString(PyExc_TypeError, "Argument 8 must be a byte array."); return NULL;} + //Optional arguments + PyObject *metaOut = NULL; + if(PyTuple_Size(args) >= 8) + { + PyObject *metaOut = PyTuple_GetItem(args, 8); + if(!PyDict_Check(metaOut) && metaOut != Py_None) + {PyErr_SetString(PyExc_TypeError, "Argument 9 (if set) must be a dict or None."); return NULL;} + if(metaOut==Py_None) + metaOut = NULL; + } + unsigned char *buffOut = NULL; unsigned buffOutLen = 0; int useExistingBuff = 0; @@ -130,6 +141,13 @@ PyObject *DecodeAndResizeFrameHighLevel(PyObject *self, PyObject *args) outWidthInt, outHeightInt); + if(metaOut!=NULL && ret > 0) + { + PyDict_SetItemString(metaOut, "width", PyInt_FromLong(outWidthInt)); + PyDict_SetItemString(metaOut, "height", PyInt_FromLong(outHeightInt)); + PyDict_SetItemString(metaOut, "format", PyString_FromString(outPixFmtC); + } + } catch(std::exception &err) { From 6ee51af00ab4581458dc249bdcaa3e126e2e9983 Mon Sep 17 00:00:00 2001 From: Tim Sheerman-Chase Date: Thu, 9 Jan 2014 15:13:22 +0000 Subject: [PATCH 246/256] Optional argument for meta data --- libvideolive.cpp | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/libvideolive.cpp b/libvideolive.cpp index 1cf4a73..aa88c91 100644 --- a/libvideolive.cpp +++ b/libvideolive.cpp @@ -101,11 +101,14 @@ PyObject *DecodeAndResizeFrameHighLevel(PyObject *self, PyObject *args) //Optional arguments PyObject *metaOut = NULL; - if(PyTuple_Size(args) >= 8) + if(PyTuple_Size(args) >= 9) { - PyObject *metaOut = PyTuple_GetItem(args, 8); + metaOut = PyTuple_GetItem(args, 8); if(!PyDict_Check(metaOut) && metaOut != Py_None) - {PyErr_SetString(PyExc_TypeError, "Argument 9 (if set) must be a dict or None."); return NULL;} + { + PyErr_SetString(PyExc_TypeError, "Argument 9 (if set) must be a dict or None."); + return NULL; + } if(metaOut==Py_None) metaOut = NULL; } @@ -145,7 +148,7 @@ PyObject *DecodeAndResizeFrameHighLevel(PyObject *self, PyObject *args) { PyDict_SetItemString(metaOut, "width", PyInt_FromLong(outWidthInt)); PyDict_SetItemString(metaOut, "height", 
PyInt_FromLong(outHeightInt)); - PyDict_SetItemString(metaOut, "format", PyString_FromString(outPixFmtC); + PyDict_SetItemString(metaOut, "format", PyString_FromString(outPixFmtC)); } } From c8fc2018ffd8e2e02fe5724e738b952d33fcc0cc Mon Sep 17 00:00:00 2001 From: TimSC Date: Fri, 10 Jan 2014 10:01:46 +0000 Subject: [PATCH 247/256] Fix type in decode func --- mfvideoin.cpp | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/mfvideoin.cpp b/mfvideoin.cpp index d599fce..a911ae3 100644 --- a/mfvideoin.cpp +++ b/mfvideoin.cpp @@ -597,9 +597,11 @@ int MfVideoIn::GetFrame(unsigned char **buffOut, class FrameMetaData *metaOut) //Do conversion to rgb unsigned char *buffConv = NULL; unsigned buffConvLen = 0; + int widthTmp = this->widthBuff[0]; + int heightTmp = this->heightBuff[0]; int ok = DecodeFrame(currentBuff, currentBuffLen, currentPixFmt.c_str(), - this->widthBuff[0], this->heightBuff[0], + widthTmp, heightTmp, "RGB24", &buffConv, &buffConvLen); From 72a91944d2f35b2734c8c81093a1bce1afe16a26 Mon Sep 17 00:00:00 2001 From: TimSC Date: Sat, 11 Jan 2014 20:03:17 +0000 Subject: [PATCH 248/256] Deallocate video out buffer when done --- namedpipeout.cpp | 13 ++++++++++--- videoout.cpp | 2 +- 2 files changed, 11 insertions(+), 4 deletions(-) diff --git a/namedpipeout.cpp b/namedpipeout.cpp index 78cc03c..4ccd4f9 100644 --- a/namedpipeout.cpp +++ b/namedpipeout.cpp @@ -207,6 +207,7 @@ VOID GetAnswerToRequest(char *pReply, LPDWORD pchBytes, class InstanceConfig &in parent->Lock(); //Copy and resize frame if necessary (and invert y) + //TODO use bilinear sampling? ResizeRgb24ImageNN(parent->currentFrame, parent->currentFrameLen, parent->currentFrameWidth, parent->currentFrameHeight, @@ -302,7 +303,6 @@ void NamedPipeOut::SendFrame(const char *imgIn, unsigned imgLen, const char *pxF cout << "NamedPipeOut::SendFrame" << endl; //Convert from input pxFmt to BGR24. 
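The pixfmt.h declaration above makes width and height in/out reference parameters: a caller that does not know the source dimensions (a raw MJPEG frame, say) passes 0 for both and reads the decoded size back, which is also why the Windows path now introduces widthTmp/heightTmp lvalues. A minimal sketch of that calling convention; the helper below is hypothetical, and the "MJPEG"/"RGB24" format strings plus the delete [] ownership rule are only assumed from the surrounding patches.

#include <cstddef>
#include "pixfmt.h" /* DecodeFrame(..., int &width, int &height, ...) as declared above */

/* Sketch only: decode a frame whose dimensions are unknown up front.
 * Passing width == 0 and height == 0 asks DecodeFrame to take the real
 * size from the JPEG header and write it back through the references. */
static void DecodeFrameOfUnknownSize(const unsigned char *data, unsigned dataLen)
{
	int width = 0, height = 0;      /* unknown on entry, filled on success */
	unsigned char *buffOut = NULL;
	unsigned buffOutLen = 0;

	int ok = DecodeFrame(data, dataLen, "MJPEG",
		width, height,              /* passed by reference, as lvalues */
		"RGB24", &buffOut, &buffOutLen);

	if(ok > 0 && buffOut != NULL)
	{
		/* width and height now hold the decoded dimensions and
		 * buffOutLen should equal width * height * 3.  The caller owns
		 * the buffer and must release it with delete []. */
		delete [] buffOut;
	}
}
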
- unsigned char *bgrBuff = NULL; unsigned bgrBuffLen = 0; int ret = DecodeFrame((unsigned char*)imgIn, imgLen, @@ -312,20 +312,27 @@ void NamedPipeOut::SendFrame(const char *imgIn, unsigned imgLen, const char *pxF &bgrBuff, &bgrBuffLen); - if(ret>0) + if(ret>0 && bgrBuff != NULL) { this->Lock(); if(bgrBuffLen > this->currentFrameAlloc || this->currentFrame == NULL) { - delete [] this->currentFrame; + //Resize current frame buffer + if(this->currentFrame != NULL) delete [] this->currentFrame; this->currentFrame = new unsigned char [bgrBuffLen]; this->currentFrameAlloc = bgrBuffLen; } + //Copy new frame to local storage memcpy(this->currentFrame, bgrBuff, bgrBuffLen); this->currentFrameWidth = width; this->currentFrameHeight = height; + //Free temporary buffer + delete [] bgrBuff; + bgrBuff = NULL; + bgrBuffLen = 0; + this->currentFrameLen = bgrBuffLen; this->UnLock(); } diff --git a/videoout.cpp b/videoout.cpp index 12f5ab5..85722d4 100644 --- a/videoout.cpp +++ b/videoout.cpp @@ -93,7 +93,7 @@ PyObject *Video_out_manager_Send_frame(Video_out_manager *self, PyObject *args) devarg = PyString_AsString(pydev); PyObject *pyimg = PyTuple_GetItem(args, 1); - imgIn = PyString_AsString(pyimg); + imgIn = NULL; if(imgIn==NULL && PyString_Check(pyimg)) imgIn = PyString_AsString(pyimg); if(imgIn==NULL && PyByteArray_Check(pyimg)) imgIn = PyByteArray_AsString(pyimg); Py_ssize_t imgLen = PyObject_Length(pyimg); From 312ff6cf5a37ff1196a8d7f895079509cd724cf5 Mon Sep 17 00:00:00 2001 From: TimSC Date: Sat, 11 Jan 2014 20:21:39 +0000 Subject: [PATCH 249/256] Do not overwrite buffer size --- namedpipeout.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/namedpipeout.cpp b/namedpipeout.cpp index 4ccd4f9..f27177b 100644 --- a/namedpipeout.cpp +++ b/namedpipeout.cpp @@ -327,13 +327,13 @@ void NamedPipeOut::SendFrame(const char *imgIn, unsigned imgLen, const char *pxF memcpy(this->currentFrame, bgrBuff, bgrBuffLen); this->currentFrameWidth = width; this->currentFrameHeight = height; + this->currentFrameLen = bgrBuffLen; //Free temporary buffer delete [] bgrBuff; bgrBuff = NULL; bgrBuffLen = 0; - this->currentFrameLen = bgrBuffLen; this->UnLock(); } else From 4fc41e0ffafb6c397d316b417b718c11448a114a Mon Sep 17 00:00:00 2001 From: TimSC Date: Mon, 13 Jan 2014 20:56:17 +0000 Subject: [PATCH 250/256] Add build commands as comments --- setup.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/setup.py b/setup.py index e3848f4..488892c 100755 --- a/setup.py +++ b/setup.py @@ -10,6 +10,10 @@ # 2013, Tim Sheerman-Chase # See README for license +#SET VS90COMNTOOLS=%VS100COMNTOOLS% +#python setup.py build -c msvc +#python setup.py install + from distutils.core import Extension, setup import os From 37d5d43d3d1821c4cd4ecb0894af2bac3faebab3 Mon Sep 17 00:00:00 2001 From: TimSC Date: Thu, 30 Jan 2014 19:13:35 +0000 Subject: [PATCH 251/256] Update video in example --- setup.py | 2 +- videoin.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/setup.py b/setup.py index 488892c..c90adfe 100755 --- a/setup.py +++ b/setup.py @@ -17,7 +17,7 @@ from distutils.core import Extension, setup import os -debug = 1 +debug = 0 if os.name == "nt": if debug: diff --git a/videoin.py b/videoin.py index a5aa754..f47c9f3 100644 --- a/videoin.py +++ b/videoin.py @@ -2,7 +2,7 @@ import videolive, time if __name__=="__main__": - inManager = videolive.Video_in_manager() + inManager = videolive.Video_in_stream_manager() print inManager devs = inManager.list_devices() From 
56feea5b1b267549044ebded8efcd5f2d6a8de70 Mon Sep 17 00:00:00 2001 From: TimSC Date: Thu, 30 Jan 2014 21:14:54 +0000 Subject: [PATCH 252/256] Fixed one memory leak, suspect more leaks are present --- mfvideoin.cpp | 18 ++++++++++++++++++ videoin.py | 2 +- 2 files changed, 19 insertions(+), 1 deletion(-) diff --git a/mfvideoin.cpp b/mfvideoin.cpp index a911ae3..99be490 100644 --- a/mfvideoin.cpp +++ b/mfvideoin.cpp @@ -583,6 +583,23 @@ int MfVideoIn::GetFrame(unsigned char **buffOut, class FrameMetaData *metaOut) return 0; } + //TEMP CODE TO DELETE BUFFER AND RETURN 0 + /*while(this->frameBuff.size() > 0) + { + delete [] this->frameBuff[0]; + this->frameBuff.erase(this->frameBuff.begin()); + this->frameLenBuff.erase(this->frameLenBuff.begin()); + this->hrStatusBuff.erase(this->hrStatusBuff.begin()); + this->dwStreamIndexBuff.erase(this->dwStreamIndexBuff.begin()); + this->dwStreamFlagsBuff.erase(this->dwStreamFlagsBuff.begin()); + this->llTimestampBuff.erase(this->llTimestampBuff.begin()); + + this->PopFrontMetaDataBuff(); + } + LeaveCriticalSection(&lock); + return 0;*/ + //END OF TEMP CODE + unsigned char* currentBuff = (unsigned char *)this->frameBuff[0]; std::string currentPixFmt = "Unknown"; unsigned currentBuffLen = this->frameLenBuff[0]; @@ -941,6 +958,7 @@ void MfVideoIn::SetSampleMetaData(DWORD streamIndex) this->heightBuff.push_back(height); this->isCompressedBuff.push_back(isComp); + SafeRelease(&pCurrentType); } void MfVideoIn::PopFrontMetaDataBuff() diff --git a/videoin.py b/videoin.py index f47c9f3..6cf8a59 100644 --- a/videoin.py +++ b/videoin.py @@ -18,7 +18,7 @@ inManager.start(devs[0][0]) count = 0 - while count < 10: + while 1: time.sleep(0.01) frame = inManager.get_frame(devs[0][0]) if frame is None: continue From 4b9124ed05e923d1b79bb2b8c82522a7d56d6f88 Mon Sep 17 00:00:00 2001 From: TimSC Date: Thu, 30 Jan 2014 22:34:35 +0000 Subject: [PATCH 253/256] Hacked version with minimal or no leak --- mfvideoin.cpp | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/mfvideoin.cpp b/mfvideoin.cpp index 99be490..8fdfc8d 100644 --- a/mfvideoin.cpp +++ b/mfvideoin.cpp @@ -635,7 +635,7 @@ int MfVideoIn::GetFrame(unsigned char **buffOut, class FrameMetaData *metaOut) cout << "Cannot convert from pix format "; wcout << this->subTypeBuff[0] << endl; } - + /* *buffOut = currentBuff; metaOut->fmt = currentPixFmt; metaOut->width = this->widthBuff[0]; @@ -644,6 +644,8 @@ int MfVideoIn::GetFrame(unsigned char **buffOut, class FrameMetaData *metaOut) metaOut->sequence = 0; metaOut->tv_sec = (unsigned long)(this->llTimestampBuff[0] / 1e7); //in 100-nanosecond units metaOut->tv_usec = (unsigned long)((this->llTimestampBuff[0] - metaOut->tv_sec * 1e7) / 10); + */ + delete [] currentBuff; this->frameBuff.erase(this->frameBuff.begin()); this->frameLenBuff.erase(this->frameLenBuff.begin()); @@ -655,6 +657,7 @@ int MfVideoIn::GetFrame(unsigned char **buffOut, class FrameMetaData *metaOut) this->PopFrontMetaDataBuff(); LeaveCriticalSection(&lock); + return 0; return 1; } From 890abfdaf692e7230d605ff504c2044170a04060 Mon Sep 17 00:00:00 2001 From: TimSC Date: Thu, 30 Jan 2014 23:11:50 +0000 Subject: [PATCH 254/256] Another fairly non leaky version with more enabled --- mfvideoin.cpp | 5 +---- videoin.cpp | 12 +++++++++++- 2 files changed, 12 insertions(+), 5 deletions(-) diff --git a/mfvideoin.cpp b/mfvideoin.cpp index 8fdfc8d..d18b644 100644 --- a/mfvideoin.cpp +++ b/mfvideoin.cpp @@ -635,7 +635,7 @@ int MfVideoIn::GetFrame(unsigned char **buffOut, class FrameMetaData *metaOut) cout 
<< "Cannot convert from pix format "; wcout << this->subTypeBuff[0] << endl; } - /* + *buffOut = currentBuff; metaOut->fmt = currentPixFmt; metaOut->width = this->widthBuff[0]; @@ -644,8 +644,6 @@ int MfVideoIn::GetFrame(unsigned char **buffOut, class FrameMetaData *metaOut) metaOut->sequence = 0; metaOut->tv_sec = (unsigned long)(this->llTimestampBuff[0] / 1e7); //in 100-nanosecond units metaOut->tv_usec = (unsigned long)((this->llTimestampBuff[0] - metaOut->tv_sec * 1e7) / 10); - */ - delete [] currentBuff; this->frameBuff.erase(this->frameBuff.begin()); this->frameLenBuff.erase(this->frameLenBuff.begin()); @@ -657,7 +655,6 @@ int MfVideoIn::GetFrame(unsigned char **buffOut, class FrameMetaData *metaOut) this->PopFrontMetaDataBuff(); LeaveCriticalSection(&lock); - return 0; return 1; } diff --git a/videoin.cpp b/videoin.cpp index 54c5ee6..758b6cb 100644 --- a/videoin.cpp +++ b/videoin.cpp @@ -171,10 +171,13 @@ PyObject *Device_manager_Get_frame(Device_manager *self, PyObject *args) return NULL; } + if(buffOut!= NULL) + delete [] buffOut; + if(ok && buffOut != NULL) { //Format output to python - PyObject *pymeta = PyDict_New(); + /*PyObject *pymeta = PyDict_New(); PyDict_SetItemString(pymeta, "width", PyInt_FromLong(metaOut.width)); PyDict_SetItemString(pymeta, "height", PyInt_FromLong(metaOut.height)); PyDict_SetItemString(pymeta, "format", PyString_FromString(metaOut.fmt.c_str())); @@ -187,6 +190,13 @@ PyObject *Device_manager_Get_frame(Device_manager *self, PyObject *args) PyTuple_SetItem(out, 1, pymeta); delete [] buffOut; + return out;*/ + + PyObject *out = PyTuple_New(2); + PyObject *test = PyDict_New(); + PyTuple_SetItem(out, 0, test); + PyObject *pymeta = PyDict_New(); + PyTuple_SetItem(out, 1, pymeta); return out; } From 7eeef9b49f0a5f9e45655f2e35c9feb8b84d3eab Mon Sep 17 00:00:00 2001 From: TimSC Date: Fri, 31 Jan 2014 00:13:50 +0000 Subject: [PATCH 255/256] Found another memory leak --- videoin.cpp | 37 ++++++++++++++++++++----------------- 1 file changed, 20 insertions(+), 17 deletions(-) diff --git a/videoin.cpp b/videoin.cpp index 758b6cb..0c113c8 100644 --- a/videoin.cpp +++ b/videoin.cpp @@ -138,6 +138,12 @@ PyObject *Device_manager_Start(Device_manager *self, PyObject *args) Py_RETURN_NONE; } +void PyDict_SetItemString_Decref(PyObject *dic, const char *key, PyObject *val) +{ + PyDict_SetItemString(dic, key, val); + Py_DECREF(val); +} + PyObject *Device_manager_Get_frame(Device_manager *self, PyObject *args) { //std::cout << "Device_manager_Get_frame" << std::endl; @@ -171,34 +177,31 @@ PyObject *Device_manager_Get_frame(Device_manager *self, PyObject *args) return NULL; } - if(buffOut!= NULL) - delete [] buffOut; - if(ok && buffOut != NULL) { //Format output to python - /*PyObject *pymeta = PyDict_New(); - PyDict_SetItemString(pymeta, "width", PyInt_FromLong(metaOut.width)); - PyDict_SetItemString(pymeta, "height", PyInt_FromLong(metaOut.height)); - PyDict_SetItemString(pymeta, "format", PyString_FromString(metaOut.fmt.c_str())); - PyDict_SetItemString(pymeta, "sequence", PyInt_FromLong(metaOut.sequence)); - PyDict_SetItemString(pymeta, "tv_sec", PyInt_FromLong(metaOut.tv_sec)); - PyDict_SetItemString(pymeta, "tv_usec", PyInt_FromLong(metaOut.tv_usec)); + PyObject *pymeta = PyDict_New(); + PyDict_SetItemString_Decref(pymeta, "width", PyInt_FromLong(metaOut.width)); + PyDict_SetItemString_Decref(pymeta, "height", PyInt_FromLong(metaOut.height)); + PyDict_SetItemString_Decref(pymeta, "format", PyString_FromString(metaOut.fmt.c_str())); + PyDict_SetItemString_Decref(pymeta, 
"sequence", PyInt_FromLong(metaOut.sequence)); + PyDict_SetItemString_Decref(pymeta, "tv_sec", PyInt_FromLong(metaOut.tv_sec)); + PyDict_SetItemString_Decref(pymeta, "tv_usec", PyInt_FromLong(metaOut.tv_usec)); PyObject *out = PyTuple_New(2); + PyTuple_SetItem(out, 0, PyByteArray_FromStringAndSize((char *)buffOut, metaOut.buffLen)); + //PyObject *test = PyDict_New(); + //PyTuple_SetItem(out, 0, test); + PyTuple_SetItem(out, 1, pymeta); delete [] buffOut; - return out;*/ - - PyObject *out = PyTuple_New(2); - PyObject *test = PyDict_New(); - PyTuple_SetItem(out, 0, test); - PyObject *pymeta = PyDict_New(); - PyTuple_SetItem(out, 1, pymeta); return out; } + + if(!ok && buffOut!= NULL) //This generally should not happen + delete [] buffOut; Py_RETURN_NONE; } From 12965ad7057c6fd3976b4120ae1bac15ae21f214 Mon Sep 17 00:00:00 2001 From: TimSC Date: Fri, 31 Jan 2014 00:14:23 +0000 Subject: [PATCH 256/256] Remove test code --- mfvideoin.cpp | 17 ----------------- videoin.cpp | 4 ---- 2 files changed, 21 deletions(-) diff --git a/mfvideoin.cpp b/mfvideoin.cpp index d18b644..f26ee2a 100644 --- a/mfvideoin.cpp +++ b/mfvideoin.cpp @@ -583,23 +583,6 @@ int MfVideoIn::GetFrame(unsigned char **buffOut, class FrameMetaData *metaOut) return 0; } - //TEMP CODE TO DELETE BUFFER AND RETURN 0 - /*while(this->frameBuff.size() > 0) - { - delete [] this->frameBuff[0]; - this->frameBuff.erase(this->frameBuff.begin()); - this->frameLenBuff.erase(this->frameLenBuff.begin()); - this->hrStatusBuff.erase(this->hrStatusBuff.begin()); - this->dwStreamIndexBuff.erase(this->dwStreamIndexBuff.begin()); - this->dwStreamFlagsBuff.erase(this->dwStreamFlagsBuff.begin()); - this->llTimestampBuff.erase(this->llTimestampBuff.begin()); - - this->PopFrontMetaDataBuff(); - } - LeaveCriticalSection(&lock); - return 0;*/ - //END OF TEMP CODE - unsigned char* currentBuff = (unsigned char *)this->frameBuff[0]; std::string currentPixFmt = "Unknown"; unsigned currentBuffLen = this->frameLenBuff[0]; diff --git a/videoin.cpp b/videoin.cpp index 0c113c8..c699191 100644 --- a/videoin.cpp +++ b/videoin.cpp @@ -189,11 +189,7 @@ PyObject *Device_manager_Get_frame(Device_manager *self, PyObject *args) PyDict_SetItemString_Decref(pymeta, "tv_usec", PyInt_FromLong(metaOut.tv_usec)); PyObject *out = PyTuple_New(2); - PyTuple_SetItem(out, 0, PyByteArray_FromStringAndSize((char *)buffOut, metaOut.buffLen)); - //PyObject *test = PyDict_New(); - //PyTuple_SetItem(out, 0, test); - PyTuple_SetItem(out, 1, pymeta); delete [] buffOut;