/**
 * @file video.cpp
 * @brief Provides video interface for streamer
 * @copyright Copyright (C) 2017 Elphel Inc.
 * @author AUTHOR <EMAIL>
 *
 * @par License:
 *  This program is free software: you can redistribute it and/or modify
 *  it under the terms of the GNU General Public License as published by
 *  the Free Software Foundation, either version 3 of the License, or
 *  (at your option) any later version.
 *
 *  This program is distributed in the hope that it will be useful,
 *  but WITHOUT ANY WARRANTY; without even the implied warranty of
 *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 *  GNU General Public License for more details.
 *
 *  You should have received a copy of the GNU General Public License
 *  along with this program.  If not, see <http://www.gnu.org/licenses/>.
*/

#include "video.h"

#include <sys/types.h>
#include <sys/stat.h>
#include <sys/ioctl.h>
#include <sys/mman.h>
#include <arpa/inet.h>
#include <fcntl.h>
#include <unistd.h>
#include <iostream>
#include <sstream>
#include <string>
#include <elphel/x393_devices.h>
#include <iomanip>

#include "streamer.h"

using namespace std;

//#undef VIDEO_DEBUG
//#undef VIDEO_DEBUG_2	                                        // for timestamp monitoring
#undef VIDEO_DEBUG_3	                                        // for FPS monitoring
#define VIDEO_DEBUG
#define VIDEO_DEBUG_2	                                        // for timestamp monitoring
//#define VIDEO_DEBUG_3	                                        // for FPS monitoring

#ifdef VIDEO_DEBUG
	#define D(s_port, a) \
	do { \
		cerr << __FILE__ << ": " << __FUNCTION__ << ": " << __LINE__ << ": sensor port: " << s_port << " "; \
		a; \
	} while (0)
#else
	#define D(s_port, a)
#endif

#ifdef VIDEO_DEBUG_2
	#define D2(s_port, a) \
	do { \
		cerr << __FILE__ << ": " << __FUNCTION__ << ": " << __LINE__ << ": sensor port: " << s_port << " "; \
		a; \
	} while (0)
#else
	#define D2(s_port, a)
#endif

#ifdef VIDEO_DEBUG_3
	#define D3(s_port, a) \
	do { \
		cerr << __FILE__ << ": " << __FUNCTION__ << ": " << __LINE__ << ": sensor port: " << s_port << " "; \
		a; \
	} while (0)
#else
	#define D3(s_port, a)
#endif

/** The length of interframe parameters in bytes */
#define METADATA_LEN              32
/** Convert byte offset to double word offset */
#define BYTE2DW(x)                ((x) >> 2)
/** Convert double word offset to byte offset */
#define DW2BYTE(x)                ((x) << 2)

static const char *circbuf_file_names[] = {
		DEV393_PATH(DEV393_CIRCBUF0), DEV393_PATH(DEV393_CIRCBUF1),
		DEV393_PATH(DEV393_CIRCBUF2), DEV393_PATH(DEV393_CIRCBUF3)
};
static const char *jhead_file_names[] = {
		DEV393_PATH(DEV393_JPEGHEAD0), DEV393_PATH(DEV393_JPEGHEAD1),
		DEV393_PATH(DEV393_JPEGHEAD2), DEV393_PATH(DEV393_JPEGHEAD3)
};

/**
 * @brief Start one instance of video interface for circbuf: open and mmap circbuf,
 * start RTP stream in new thread.
 * @param   port   sensor port number this instance should work with
 * @param   pars   pointer to parameters instance for the current sensor port
 * @return  None
 */
Video::Video(int port, Parameters *pars) {
	string err_msg;
	params = pars;
	sensor_port = port;
	stream_name = "video";
	lastDaemonBit = DAEMON_BIT_STREAMER;

	D(sensor_port, cerr << "Video::Video() on sensor port " << port << endl);
	fd_circbuf = open(circbuf_file_names[sensor_port], O_RDONLY);
	if (fd_circbuf < 0) {
		err_msg = "can't open " + static_cast<ostringstream &>(ostringstream() << dec << sensor_port).str();
		throw runtime_error(err_msg);
	}

	buffer_length = lseek(fd_circbuf, 0, SEEK_END);
	waitDaemonEnabled(-1);                                      // <0 - use default

	// mmap for the whole lifetime of the program, not per stream. AF
	buffer_ptr = (unsigned long *) mmap(0, buffer_length, PROT_READ, MAP_SHARED, fd_circbuf, 0);
	if (buffer_ptr == (unsigned long *) MAP_FAILED) {
		err_msg = string("can't mmap ") + circbuf_file_names[sensor_port];
		throw runtime_error(err_msg);
	}
	buffer_ptr_end = (unsigned char *)(buffer_ptr + BYTE2DW(buffer_length));

	// Skip several frames right after boot
	// May get stuck here if the compressor is off; it should be enabled externally
	D(sensor_port, cerr << " frame=" << params->getGPValue(G_THIS_FRAME) << " buffer_length=" << buffer_length << endl);
	while (params->getGPValue(G_THIS_FRAME) < 10) {
		lseek(fd_circbuf, LSEEK_CIRC_TOWP, SEEK_END);           // get to the end of buffer
		lseek(fd_circbuf, LSEEK_CIRC_WAIT, SEEK_END);           // wait frame got ready there
	}
	// One more wait always to make sure compressor is actually running
	lseek(fd_circbuf, LSEEK_CIRC_WAIT, SEEK_END);
	lseek(fd_circbuf, LSEEK_CIRC_WAIT, SEEK_END);
	D(sensor_port, cerr << " frame=" << params->getGPValue(G_THIS_FRAME) << " buffer_length=" << buffer_length <<endl);
	fd_jpeghead = open(jhead_file_names[sensor_port], O_RDWR);
	if (fd_jpeghead < 0) {
		err_msg = "can't open " + *jhead_file_names[sensor_port];
		throw runtime_error(err_msg);
	}
	qtables_include = true;

	SSRC = 12;
	_ptype = 26;
	rtp_socket = NULL;
	rtcp_socket = NULL;
	_play = false;
	prev_jpeg_wp = 0;
	f_quality = -1;

	// create thread...
	init_pthread((void *) this);
	D(sensor_port, cerr << "finish constructor" << endl);
}

/**
 * @brief Close and unmap circbuf files
 * @param   None
 * @return  None
 */
Video::~Video(void) {
	cerr << "Video::~Video() on port " << sensor_port << endl;
	if (buffer_ptr != NULL) {
		munmap(buffer_ptr, buffer_length);
		buffer_ptr = NULL;
	}
	if (fd_circbuf > 0)
		close(fd_circbuf);
	if (fd_jpeghead > 0)
		close(fd_jpeghead);
}

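/**
 * @brief Start the RTP stream: query the current frame parameters, remember the frame size used
 * for streaming, set up FPS decimation and start the base RTP_Stream transmitter.
 * @param   ip          destination IP address
 * @param   port        destination UDP port
 * @param   _fps_scale  FPS decimation factor (send every _fps_scale-th frame, minimum 1)
 * @param   ttl         time-to-live value passed to the RTP socket
 * @return  None
 */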
void Video::Start(string ip, long port, int _fps_scale, int ttl) {
	D(sensor_port, cerr << "_play=" << _play << endl);
	if (_play) {
		cerr << "ERROR-->> wrong usage: Video()->Start() when already play!!!" << endl;
		return;
	}
	// statistic
	v_t_sec = 0;
	v_t_usec = 0;
	v_frames = 0;
	// create UDP socket
	struct video_desc_t video_desc = get_current_desc(false);
	f_width = video_desc.width;
	f_height = video_desc.height;
	used_width = f_width;
	used_height = f_height;
	fps_scale = _fps_scale;
	if (fps_scale < 1)
		fps_scale = 1;
	fps_scale_c = 0;
	RTP_Stream::Start(ip, port, ttl);
}

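/**
 * @brief Stop the RTP stream if it is playing and reset the saved compressor write pointer
 * @param   None
 * @return  None
 */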
void Video::Stop(void) {
	if (!_play)
		return;
	RTP_Stream::Stop();
	_play = false;
	// destroy UDP socket
	prev_jpeg_wp = 0;
}


/**
 * @brief Check if this application is enabled (by the appropriate bit in P_DAEMON_EN);
 * if it is not, wait until it becomes enabled.
 * @param   daemonBit   bit number to accept control in P_DAEMON_EN parameter
 * @return  true if there was no waiting (already enabled), false if the function had to wait
 */
bool Video::waitDaemonEnabled(int daemonBit) {                  // <0 - use default
	if ((daemonBit >= 0) && (daemonBit < 32))
		lastDaemonBit = daemonBit;
	unsigned long this_frame = params->getGPValue(G_THIS_FRAME);
	// There are no semaphores, so it is possible to miss an event and wait until the streamer is re-enabled
	// before sending the message, but that does not seem critical
	D(sensor_port, cerr << " lseek(fd_circbuf" << sensor_port << ", LSEEK_DAEMON_CIRCBUF+lastDaemonBit, SEEK_END)... " << endl);
	lseek(fd_circbuf, LSEEK_DAEMON_CIRCBUF + lastDaemonBit, SEEK_END);
	D(sensor_port, cerr << "...done" << endl);

	if (this_frame == params->getGPValue(G_THIS_FRAME))
		return true;
	return false;
}

/**
 * @brief Check if this application is enabled (by the appropriate bit in P_DAEMON_EN) without waiting
 * @param   daemonBit   bit number to accept control in P_DAEMON_EN parameter
 * @return  true if the daemon bit is set (streamer enabled), false otherwise
 */
bool Video::isDaemonEnabled(int daemonBit) { // <0 - use default
	if ((daemonBit >= 0) && (daemonBit < 32))
		lastDaemonBit = daemonBit;
	return ((params->getFrameValue(P_DAEMON_EN) & (1 << lastDaemonBit)) != 0);
}


/**
 * @brief Return a (byte) pointer to the valid frame 'before' frames before the current one (if the current frame
 * is invalid, use the latest; wait if none are ready). Restore (or modify, if the function had to wait) the file
 * pointer and fill the provided frame_pars with the metadata (including the time stamp).
 * @param   frame_pars   pointer to an interframe parameters structure
 * @param   before       how many frames before the current pointer are needed
 * @param   ptr_before   file pointer to restore before reading (>0), <0 to use the current write pointer, 0 to save and restore the current position
 * @return  pointer (offset in circbuf) to the frame start, or -1 if the frame metadata is broken
 */
long Video::getFramePars(struct interframe_params_t *frame_pars, long before, long ptr_before) {
	long cur_pointer, p;

	long this_pointer = 0;
	if (ptr_before > 0) {
		// if an earlier frame is needed, restore the file pointer to the one obtained by a previous call
		this_pointer = lseek(fd_circbuf, ptr_before, SEEK_SET); // restore the file pointer
	}
	if (ptr_before < 0) {
		// otherwise, set pointer to the actual frame
		this_pointer = lseek(fd_circbuf, LSEEK_CIRC_TOWP, SEEK_END); // byte index in circbuf of the frame start
	}
	if (ptr_before == 0)
		this_pointer = lseek(fd_circbuf, 0, SEEK_CUR);          // save original file pointer
	char *char_buffer_ptr = (char *) buffer_ptr;
	if (lseek(fd_circbuf, LSEEK_CIRC_VALID, SEEK_END) < 0) {    // Invalid frame - reset to the latest acquired
		this_pointer = lseek(fd_circbuf, LSEEK_CIRC_LAST, SEEK_END); // Last acquired frame (may be not yet available if none are)
	}
	cur_pointer = this_pointer;
	if (before == 0)
		lseek(fd_circbuf, LSEEK_CIRC_WAIT, SEEK_END);
	while (before && (((p = lseek(fd_circbuf, LSEEK_CIRC_PREV, SEEK_END))) >= 0)) { // try to get earlier valid frame
		cur_pointer = p;
		before--;
	}

	// if 'before' is still >0 - not enough frames acquired, wait for more
	while (before > 0) {
		lseek(fd_circbuf, this_pointer, SEEK_SET);
		lseek(fd_circbuf, LSEEK_CIRC_WAIT, SEEK_END);
		this_pointer = lseek(fd_circbuf, LSEEK_CIRC_NEXT, SEEK_END);
		before--;
	}

	// copy the interframe data (time stamps are not yet there)
	long metadata_start = cur_pointer - METADATA_LEN;
	if (metadata_start >= 0) {
		D(sensor_port, cerr << " before=" << before << " metadata_start=" << metadata_start << endl);
		memcpy(frame_pars, &char_buffer_ptr[metadata_start], METADATA_LEN);
	} else {
		// metadata rolls over the end of the buffer and we need to copy both chunks
		size_t meta_len_first = METADATA_LEN - cur_pointer;
		metadata_start += buffer_length;
		memcpy(frame_pars, &char_buffer_ptr[metadata_start], meta_len_first);
		D(sensor_port, cerr << "metadata rolls over: metadata_start = " << metadata_start << "first chunk len = " << meta_len_first);

		size_t meta_len_second = METADATA_LEN - meta_len_first;
		char *dest = (char *)frame_pars;
		memcpy(&dest[meta_len_first], char_buffer_ptr, meta_len_second);
		D(sensor_port, cerr << ", second chunk len = " << meta_len_second << endl);
	}

	long jpeg_len = frame_pars->frame_length;                   // frame_pars->frame_length is now the length of bitstream
	if (frame_pars->signffff != 0xffff) {
		cerr << __FILE__ << ":" << __FUNCTION__ << ":" << __LINE__
				<< "  Wrong signature in getFramePars() (broken frame), frame_pars->signffff="
				<< frame_pars->signffff << endl;
		int i;
		long * dd = (long *) frame_pars;
		cerr << hex << (metadata_start / 4) << ": ";
		for (i = 0; i < 8; i++) {
			cerr << hex << dd[i] << "  ";
		}
		cerr << dec << endl;
		return -1;
	}
	// find location of the time stamp and copy it to the frame_pars structure
	long timestamp_start = (cur_pointer) + ((jpeg_len + CCAM_MMAP_META + 3) & (~0x1f)) + 32 - CCAM_MMAP_META_SEC; //! magic shift - should index first byte of the time stamp
	if (timestamp_start >= buffer_length)
		timestamp_start -= buffer_length;
	memcpy(&(frame_pars->timestamp_sec), &char_buffer_ptr[timestamp_start], 8);
	if (ptr_before == 0)
		lseek(fd_circbuf, this_pointer, SEEK_SET);              // restore the file pointer

	return cur_pointer;
}


/**
 * @brief Return a description of the current video frame, i.e. the current video parameters.
 * This assumes that the frame pointed to by the current file pointer in circbuf is ready; otherwise
 * (if it points at the next frame to be acquired) the number of frames to step back needs to be
 * increased by 1 (see FRAMEPARS_BEFORE below) to get all parameters together
 */
#define FRAMEPARS_BEFORE 0                                      // Change to 1 if frames are not yet ready when these functions are called
struct video_desc_t Video::get_current_desc(bool with_fps) {
	struct interframe_params_t frame_pars, prev_pars;
	struct video_desc_t video_desc;
	video_desc.valid = false;
	long ptr = -1;
	if ((ptr = getFramePars(&frame_pars, FRAMEPARS_BEFORE, -1)) < 0) {
		return video_desc;
	} else {
		if (with_fps) {
			if (getFramePars(&prev_pars, FRAMEPARS_BEFORE + 1, ptr) < 0)
				return video_desc;
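			// FPS is the inverse of the interval between this frame's and the previous frame's timestamps,
			// with the interval computed in microseconds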
			double fps = (frame_pars.timestamp_sec - prev_pars.timestamp_sec);
			fps *= 1000000.0;
			fps += frame_pars.timestamp_usec;
			fps -= prev_pars.timestamp_usec;
			fps = 1000000.0 / fps;
			video_desc.fps = (used_fps = fps);
			D(sensor_port, cerr << " fps=" << fps << endl);
		}
	}
	video_desc.valid = true;
	video_desc.width = frame_pars.width;
	video_desc.height = frame_pars.height;
	video_desc.quality = frame_pars.quality2;
	return video_desc;
}

void Video::fps(float fps) {
	if (fps < 0.01)
		return;
	// Currently limiting FPS only works with a free-running sensor. TODO: add external trigger frequency support.
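	// write_data[] is sent to the parameters driver as {FRAMEPARS_SETFRAME, frame number to apply at}
	// followed by (parameter, value) pairs: P_FP1000SLIM (FPS limit in frames per 1000 s) and P_FPSFLAGS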
	unsigned long write_data[6];
	long target_frame = params->getGPValue(G_THIS_FRAME) + FRAMES_AHEAD_FPS;
	write_data[0] = FRAMEPARS_SETFRAME;
	write_data[1] = target_frame; /// then wait for that frame to be available on the output, plus 2 more frames for the fps to become stable
	write_data[2] = P_FP1000SLIM;
	write_data[3] = (unsigned long) (fps * 1000);
	write_data[4] = P_FPSFLAGS;
	write_data[5] = 3;
	int rslt = params->write(write_data, sizeof(write_data));
	if (rslt == sizeof(write_data)) { /// written OK
		params->lseek(LSEEK_FRAME_WAIT_ABS + target_frame + FRAMES_SKIP_FPS, SEEK_END); /// skip frames 
	}
}

/** Get frame length in bytes.
 * @param   offset   byte offset of a frame in cirbuf
 * @return  The length of the frame in bytes
 */
unsigned long Video::get_frame_len(unsigned long offset)
{
	unsigned long len;
	long long len_offset = BYTE2DW(offset) - 1;

	if (len_offset < 0) {
		len_offset = BYTE2DW(buffer_length - offset) - 1;
	}
	len = buffer_ptr[len_offset];

	return len;
}

/** Get interframe parameters for the frame offset given and copy them to the buffer.
 * @param   frame_pars   buffer for interframe parameters
 * @param   offset       starting offset of the frame in circbuf (in bytes)
 * @return  None
 */
void Video::get_frame_pars(void *frame_pars, unsigned long offset)
{
	unsigned long *ptr;
	unsigned long remainder;
	unsigned long pos;

	if (offset >= METADATA_LEN) {
		ptr = &buffer_ptr[BYTE2DW(offset - METADATA_LEN)];
		memcpy(frame_pars, ptr, METADATA_LEN);
		D3(sensor_port, cerr << "Read interframe params, ptr: " << (void *)ptr << endl);
	} else {
		// copy the chunk from the end of the buffer
		remainder = METADATA_LEN - offset;
		pos = buffer_length - offset;
		ptr = &buffer_ptr[BYTE2DW(pos)];
		memcpy(frame_pars, ptr, remainder);
		D3(sensor_port, cerr << "Read interframe params (first chunk), ptr: " << (void *)ptr << endl);

		// copy the chunk from the beginning of the buffer
		char *dest = (char *)frame_pars + remainder;
		memcpy(dest, buffer_ptr, offset);
		D3(sensor_port, cerr << "Read interframe params (second chunk), ptr: " << (void *)buffer_ptr << endl);
	}
}

#define USE_REAL_OLD_TIMESTAMP 0
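/**
 * @brief Acquire the latest compressed frame from circbuf: wait for it, read its time stamp and
 * interframe parameters, fall back to an earlier frame (up to G_SKIP_DIFF_FRAME frames back) if the
 * frame size differs from the one used when the stream was started, and refresh the quantization
 * tables if the JPEG quality has changed.
 * @return  frame length in bytes, or a negative error code (-DAEMON_DISABLED, -SIZE_CHANGE)
 */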
long Video::capture(void) {
	long frame_len;
	struct interframe_params_t frame_pars;
	struct interframe_params_t curr_frame_params;
	struct interframe_params_t *fp = &curr_frame_params;
	int quality;
	unsigned long latestAvailableFrame_ptr;
	unsigned long frameStartByteIndex;
	int before;

	// make sure the streamer is not disabled through the bit in P_DAEMON_EN
	if ((params->getFrameValue(P_DAEMON_EN) & (1 << lastDaemonBit)) == 0) {
		return -DAEMON_DISABLED;                                // return exception (will stop the stream)
	}
	frameStartByteIndex = lseek(fd_circbuf, LSEEK_CIRC_TOWP, SEEK_END); // byte index in circbuf of the frame start
	latestAvailableFrame_ptr = frameStartByteIndex;
	lseek(fd_circbuf, LSEEK_CIRC_WAIT, SEEK_END);

	frame_ptr = (char *) ((unsigned long) buffer_ptr + latestAvailableFrame_ptr);
	frame_len = get_frame_len(latestAvailableFrame_ptr);
	D3(sensor_port, cerr << "Frame start byte index: " << frameStartByteIndex <<
			", frame pointer: " << (void *)frame_ptr <<
			", frame length: " << frame_len << endl);

	// read time stamp
	unsigned char *ts_ptr = (unsigned char *) ((unsigned long) frame_ptr + (long) (((frame_len + CCAM_MMAP_META + 3) & (~0x1f)) + 32 - CCAM_MMAP_META_SEC));
	if (ts_ptr >= buffer_ptr_end) {
		ts_ptr -= buffer_length;
	}
	unsigned long t[2];
	memcpy(&t, (void *) ts_ptr, 8);
	f_tv.tv_sec = t[0];
	f_tv.tv_usec = t[1];

	// read Q value
	get_frame_pars(fp, latestAvailableFrame_ptr);

	// See if the frame parameters are the same as were used when starting the stream,
	// otherwise check for up to G_SKIP_DIFF_FRAME older frames and return them instead.
	// If that number is exceeded - return exception.
	// Each time the latest acquired frame is considered, so we do not need to save frame pointer additionally
	if ((fp->width != used_width) || (fp->height != used_height)) {
		for (before = 1; before <= (int) params->getGPValue(G_SKIP_DIFF_FRAME); before++) {
			if (((frameStartByteIndex = getFramePars(&frame_pars, before)))
					&& (frame_pars.width == used_width) && (frame_pars.height == used_height)) {
				// substitute older frame instead of the latest one. Leave wrong timestamp?
				// copying code above (may need some cleanup). Maybe - just move earlier so there will be no code duplication?
				latestAvailableFrame_ptr = frameStartByteIndex;
				frame_ptr = (char *) ((unsigned long) buffer_ptr + latestAvailableFrame_ptr);
				frame_len = get_frame_len(latestAvailableFrame_ptr);
				D3(sensor_port, cerr << "Frame length " << frame_len << endl);

#if USE_REAL_OLD_TIMESTAMP
/// read timestamp
				ts_ptr = (unsigned char *)((unsigned long)frame_ptr + (long)(((frame_len + CCAM_MMAP_META + 3) & (~0x1f)) + 32 - CCAM_MMAP_META_SEC));
				memcpy(&t, (void *)ts_ptr, 8);
				f_tv.tv_sec = t[0];
				f_tv.tv_usec = t[1];
#endif
				// update interframe data pointer
				get_frame_pars(fp, latestAvailableFrame_ptr);
				D3(sensor_port, cerr << "frame_pars->signffff" << fp->signffff << endl);
				break;
			}
		}
		if (before > (int) params->getGPValue(G_SKIP_DIFF_FRAME)) {
			D(sensor_port, cerr << " Killing stream because of frame size change " << endl);
			return -SIZE_CHANGE; /// It seems that frame size is changed for good, need to restart the stream
		}
		D(sensor_port, cerr << " Waiting for the original frame size to be restored , using " << before << " frames ago" << endl);
	}

	quality = fp->quality2;
	if (qtables_include && quality != f_quality) {
		D(sensor_port, cerr << " Updating quality tables, new quality is " << quality << endl);
		lseek(fd_jpeghead, frameStartByteIndex | 2, SEEK_END);  // '| 2' indicates that we need just quantization tables, not full JPEG header
		read(fd_jpeghead, (void *) &qtable[0], 128);
	}
	f_quality = quality;

	return frame_len;
}

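/**
 * @brief Capture one frame and send it as a sequence of RTP/JPEG (RFC 2435) packets,
 * applying FPS decimation and prepending the quantization tables to the first packet when needed.
 * @return  1 on success (frame sent or skipped), 0 or a negative value if the stream needs attention/restart
 */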
long Video::process(void) {
	int _plen = 1400;
	int to_send = _plen;
	int _qtables_len = 128 + 4;
	long frame_len = capture();
	if (frame_len == 0) {
		return 0;                                               // now never here
	} else {
		if (frame_len < 0) {
			D(sensor_port, cerr << "capture returned negative" << frame_len << endl);
			return frame_len;                                   // attention (restart) is needed
		}
	}
	// check FPS decimation
	bool to_skip = true;
	if (fps_scale_c == 0)
		to_skip = false;
	fps_scale_c++;
	if (fps_scale_c >= fps_scale)
		fps_scale_c = 0;
	if (to_skip)
		return 1;

	int to_send_len = frame_len;
	unsigned char h[20 + 4];
	int packet_len = 0;
	unsigned char *data = (unsigned char *) frame_ptr;

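	// Convert the frame's wall-clock time stamp (seconds + microseconds) into the 90 kHz RTP
	// timestamp required by the RTP/JPEG payload format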
	uint64_t t = f_tv.tv_sec;
	t *= 90000;
	t &= 0x00FFFFFFFF;
	timestamp = t;
	double f = f_tv.tv_usec;
	f /= 1000000.0;
	f *= 90000.0;
	timestamp += (uint32_t) f;
	uint32_t ts;
	ts = timestamp;
	ts = htonl(ts);
	D(sensor_port, cerr << "This frame's time stamp: " << timestamp << endl);

	long offset = 0;
	struct iovec iov[4];
	int vect_num;
	bool first = true;
	while (to_send_len && _play) {
		unsigned long pnum = htons(packet_num);
		bool last = false;
		to_send = _plen;
		if (qtables_include && first)
			to_send = _plen - _qtables_len;
		if (to_send_len <= to_send) {
			packet_len = to_send_len;
			to_send_len = 0;
			last = true;
		} else {
			packet_len = to_send;
			to_send_len -= to_send;
		}
		// make RTP packet
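		// 12-byte RTP fixed header: h[0] - version 2, no padding/extension/CSRC; h[1] - marker bit
		// (set on the last packet of the frame) plus payload type; h[2..3] - sequence number;
		// h[4..7] - timestamp; h[8..11] - SSRC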
		h[0] = 0x80;
		if (!last)
			h[1] = _ptype;
		else
			h[1] = 0x80 + _ptype;
		memcpy((void *) &h[2], (void *) &pnum, 2);
		memcpy((void *) &h[4], (void *) &ts, 4);
		memcpy((void *) &h[8], (void *) &SSRC, 4);
		// make MJPEG header
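		// 8-byte RTP/JPEG main header (RFC 2435): h[12] - type-specific (0), h[13..15] - fragment offset,
		// h[16] - type (1), h[17] - Q value (+128 when quantization tables are sent in-band),
		// h[18..19] - width/8 and height/8; followed by the 4-byte quantization table header in h[20..23]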
		unsigned long off = htonl(offset);
		memcpy((void *) &h[12], (void *) &off, 4);
		h[12] = 0x00;
		h[16] = 0x01;
		unsigned int q = f_quality;
		if (qtables_include)
			q += 128;
		h[17] = (unsigned char) (q & 0xFF);
		if (f_width <= 2040)
587 588 589
			h[18] = (f_width / 8) & 0xFF;
		else
			h[18] = 0;
590
		if (f_height <= 2040)
591 592 593 594 595 596
			h[19] = (f_height / 8) & 0xFF;
		else
			h[19] = 0;
		h[20] = 0;
		h[21] = 0;
		unsigned short l = htons(128);
597
		memcpy((void *) &h[22], (void *) &l, 2);
598 599
		// update RTCP statistic
		rtp_packets++;
600
		rtp_octets += packet_len + 8;                           // data + MJPEG header
601
		// send vector
602 603
		vect_num = 0;
		iov[vect_num].iov_base = h;
604 605
		if (first) {
			if (qtables_include) {
606 607 608
				iov[vect_num++].iov_len = 24;
				iov[vect_num].iov_base = qtable;
				iov[vect_num++].iov_len = 128;
609
			} else {
610
				iov[vect_num++].iov_len = 20;
611 612 613
			}
			first = false;
		} else {
614
			iov[vect_num++].iov_len = 20;
615
		}
616
		if ((data + packet_len) <= buffer_ptr_end) {
			iov[vect_num].iov_base = data;
			iov[vect_num++].iov_len = packet_len;
			data += packet_len;
		} else {
			// current packet rolls over the end of the buffer, split it and set data pointer to the buffer start
			int overshoot = (data + packet_len) - (unsigned char *)(buffer_ptr + BYTE2DW(buffer_length));
			int packet_len_first = packet_len - overshoot;
			iov[vect_num].iov_base = data;
			iov[vect_num++].iov_len = packet_len_first;

			iov[vect_num].iov_base = buffer_ptr;
			iov[vect_num++].iov_len = overshoot;
			D3(sensor_port, cerr << "Current data packet rolls over the buffer, overshoot: " << overshoot <<
					", packet_len_first: " << packet_len_first << endl);
			data = (unsigned char *)buffer_ptr + overshoot;
		}
		rtp_socket->send_vect(iov, vect_num);

		packet_num++;
		offset += packet_len;
	}
	D3(sensor_port, cerr << "Packets sent: " << packet_num << endl);
	return 1;
}