Commit e7b3122e authored by Mikhail Karpenko

Update debug macros for streamer and video classes

parent 99103819
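The D()/D2()/D3() debug macros now take the sensor port as a first argument and prefix every message with file, function, line and port, so output from the per-port streamer threads can be told apart. A minimal usage sketch (sensor_port stands for the member variable the calling class already holds):

    // old form
    D( cerr << "update_settings" << endl;)
    // new form
    D(sensor_port, cerr << "update_settings" << endl);
    // with RTSP_DEBUG defined, the new form prints roughly:
    //   <file>: <function>: <line>: sensor port: <N> update_settings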
......@@ -38,11 +38,17 @@ using namespace std;
* @return None
*/
void clean_up(pthread_t *threads, size_t sz) {
for (size_t i = 0; i < sz; i++)
pthread_cancel(threads[i]);
int ret_val;
for (size_t i = 0; i < sz; i++) {
ret_val = pthread_cancel(threads[i]);
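// note: pthread_cancel() only queues a cancellation request for the thread;
// a non-zero return (typically ESRCH) means no thread with that ID was found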
if (ret_val != 0)
cout << "pthread_cancel returned " << ret_val << ", sensor port " << i << endl;
}
}
int main(int argc, char *argv[]) {
int ret_val;
string opt;
map<string, string> args;
pthread_t threads[SENSOR_PORTS];
......@@ -72,8 +78,10 @@ int main(int argc, char *argv[]) {
streamers[i] = new Streamer(args, i);
pthread_attr_init(&attr);
if (!pthread_create(&threads[i], &attr, Streamer::pthread_f, (void *) streamers[i])) {
cerr << "Can not spawn streamer thread for port " << i << endl;
ret_val = pthread_create(&threads[i], &attr, Streamer::pthread_f, (void *) streamers[i]);
if (ret_val != 0) {
cerr << "Can not spawn streamer thread for port " << i;
cerr << ", pthread_create returned " << ret_val << endl;
clean_up(threads, SENSOR_PORTS);
exit(EXIT_FAILURE);
}
......
......@@ -47,6 +47,7 @@ public:
off_t lseek(off_t offset, int whence) { return ::lseek(fd_fparmsall, offset, whence); }
bool daemon_enabled(void);
void setPValue(unsigned long *val_array, int count);
inline int get_port_num() const { return sensor_port; }
protected:
// static Parameters *_parameters;
......
......@@ -37,15 +37,23 @@ using namespace std;
#define RTSP_DEBUG_2
#ifdef RTSP_DEBUG
#define D(a) a
#define D(s_port, a) \
do { \
cerr << __FILE__ << ": " << __FUNCTION__ << ": " << __LINE__ << ": sensor port: " << s_port << " "; \
a; \
} while (0)
#else
#define D(a)
#define D(s_port, a)
#endif
#ifdef RTSP_DEBUG_2
#define D2(a) a
#define D2(s_port, a) \
do { \
cerr << __FILE__ << ": " << __FUNCTION__ << ": " << __LINE__ << ": sensor port: " << s_port << " "; \
a; \
} while (0)
#else
#define D2(a)
#define D2(s_port, a)
#endif
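// note: the do { ... } while (0) wrapper keeps each macro a single statement,
// so a call like D2(sensor_port, cerr << "stop" << endl); stays safe inside an
// unbraced if/else branch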
//Streamer *Streamer::_streamer = NULL;
......@@ -65,12 +73,12 @@ Streamer::Streamer(const map<string, string> &_args, int port_num) {
session->video.fps_scale = 1;
audio_init();
video = new Video(sensor_port, params);
if(opt_present("f")) {
if (opt_present("f")) {
float fps = 0;
fps = atof(args["f"].c_str());
if(fps < 0.1)
if (fps < 0.1)
fps = 0;
D( cout << "use fps: " << fps << endl;)
D(sensor_port, cout << "use fps: " << fps << endl);
video->fps(fps);
}
rtsp_server = NULL;
......@@ -79,10 +87,10 @@ D( cout << "use fps: " << fps << endl;)
void Streamer::audio_init(void) {
if (audio != NULL) {
D(cerr << "delete audio" << endl;)
D(sensor_port, cerr << "delete audio" << endl);
delete audio;
}
D(cout << "audio_enabled == " << session->process_audio << endl;)
D(sensor_port, cout << "audio_enabled == " << session->process_audio << endl);
audio = new Audio(session->process_audio, params, session->audio.sample_rate, session->audio.channels);
if (audio->present() && session->process_audio) {
session->process_audio = true;
......@@ -109,7 +117,7 @@ int Streamer::f_handler(void *ptr, RTSP_Server *rtsp_server, RTSP_Server::event
}
int Streamer::update_settings(bool apply) {
D( cerr << "update_settings" << endl;)
D(sensor_port, cerr << "update_settings" << endl);
// check settings, normalize them, return 1 if anything was changed
// update settings in the application if apply == 1 and the parameter change is not safe on the fly; update parameters always
......@@ -130,19 +138,19 @@ D( cerr << "update_settings" << endl;)
//----------------
// frame skip, or FPS scale
int frames_skip = params->getGPValue(P_STROP_FRAMES_SKIP);
if(frames_skip < 0 || frames_skip > 0xFFFF) {
if(frames_skip < 0)
if (frames_skip < 0 || frames_skip > 0xFFFF) {
if (frames_skip < 0)
frames_skip = 0;
if(frames_skip < 0xFFFF)
if (frames_skip > 0xFFFF)
frames_skip = 0xFFFF;
changes_array[changes_array_i + 0] = P_STROP_FRAMES_SKIP;
changes_array[changes_array_i + 1] = (unsigned long)frames_skip;
changes_array[changes_array_i + 1] = (unsigned long) frames_skip;
changes_array_i += 2;
params_update = true;
}
frames_skip += 1; // convert to fps_scale format;
if(frames_skip != session->video.fps_scale) {
if(apply)
if (frames_skip != session->video.fps_scale) {
if (apply)
session->video.fps_scale = frames_skip;
//cerr << "session->video.fps_scale = " << session->video.fps_scale << endl;
result = true;
......@@ -151,10 +159,10 @@ D( cerr << "update_settings" << endl;)
// transport parameters
bool transport_was_changed = false;
bool param_multicast = params->getGPValue(P_STROP_MCAST_EN);
if(param_multicast || session->rtp_out.multicast) {
if (param_multicast || session->rtp_out.multicast) {
// multicast/unicast
if(param_multicast != session->rtp_out.multicast) {
if(apply)
if (param_multicast != session->rtp_out.multicast) {
if (apply)
session->rtp_out.multicast = param_multicast;
transport_was_changed = true;
}
......@@ -162,31 +170,31 @@ D( cerr << "update_settings" << endl;)
unsigned long ip = params->getGPValue(P_STROP_MCAST_IP);
bool ip_was_changed = false;
// switch custom/default IP
if((ip == 0) && session->rtp_out.ip_custom)
if ((ip == 0) && session->rtp_out.ip_custom)
ip_was_changed = true;
if((ip != 0) && !session->rtp_out.ip_custom)
if ((ip != 0) && !session->rtp_out.ip_custom)
ip_was_changed = true;
// change of custom IP
if((ip != 0) && session->rtp_out.ip_custom)
if(ip != session->rtp_out.ip_cached)
if ((ip != 0) && session->rtp_out.ip_custom)
if (ip != session->rtp_out.ip_cached)
ip_was_changed = true;
if(ip_was_changed) {
if(ip != 0) {
if (ip_was_changed) {
if (ip != 0) {
struct in_addr a;
uint32_t a_min = ntohl(inet_addr("224.0.0.0"));
uint32_t a_max = ntohl(inet_addr("239.255.255.255"));
if(a_min > a_max) {
if (a_min > a_max) {
uint32_t a = a_min;
a_min = a_max;
a_max = a;
}
if(ip < a_min)
if (ip < a_min)
ip = a_min;
if(ip > a_max)
if (ip > a_max)
ip = a_max;
a.s_addr = htonl(ip);
D( cerr << "multicast ip asked: " << inet_ntoa(a) << endl;)
if(apply) {
D(sensor_port, cerr << "multicast ip asked: " << inet_ntoa(a) << endl);
if (apply) {
session->rtp_out.ip_cached = ip;
session->rtp_out.ip_custom = true;
session->rtp_out.ip = inet_ntoa(a);
......@@ -196,24 +204,24 @@ D( cerr << "multicast ip asked: " << inet_ntoa(a) << endl;)
}
} else {
struct in_addr a = Socket::mcast_from_local();
D( cerr << "multicast ip generated: " << inet_ntoa(a) << endl;)
if(apply) {
D(sensor_port, cerr << "multicast ip generated: " << inet_ntoa(a) << endl);
if (apply) {
session->rtp_out.ip_custom = false;
session->rtp_out.ip = inet_ntoa(a);
}
}
transport_was_changed = true;
}
D( if(apply))
D( cerr << "actual multicast IP: " << session->rtp_out.ip << endl;)
//D( if(apply))
D(sensor_port, if (apply) cerr << "actual multicast IP: " << session->rtp_out.ip << endl);
// port
int port = params->getGPValue(P_STROP_MCAST_PORT);
if(port != session->rtp_out.port_video) {
if(port < 1024)
if (port != session->rtp_out.port_video) {
if (port < 1024)
port = 1024;
if(port > 65532)
if (port > 65532)
port = 65532;
if(apply) {
if (apply) {
session->rtp_out.port_video = port;
session->rtp_out.port_audio = port + 2;
changes_array[changes_array_i + 0] = P_STROP_MCAST_PORT;
......@@ -224,12 +232,12 @@ D( cerr << "actual multicast IP: " << session->rtp_out.ip << endl;)
}
// ttl
int ttl = params->getGPValue(P_STROP_MCAST_TTL);
if(ttl != atoi(session->rtp_out.ttl.c_str())) {
if(ttl < 1)
if (ttl != atoi(session->rtp_out.ttl.c_str())) {
if (ttl < 1)
ttl = 1;
if(ttl > 15)
if (ttl > 15)
ttl = 15;
if(apply) {
if (apply) {
char buf[8];
sprintf(buf, "%d", ttl);
session->rtp_out.ttl = buf;
......@@ -240,7 +248,7 @@ D( cerr << "actual multicast IP: " << session->rtp_out.ip << endl;)
transport_was_changed = true;
}
}
if(transport_was_changed)
if (transport_was_changed)
params_update = true;
//-----------------
......@@ -250,63 +258,63 @@ D( cerr << "actual multicast IP: " << session->rtp_out.ip << endl;)
bool f_audio_rate = false;
bool f_audio_channels = false;
// - enabled/disabled
if(params->getGPValue(P_STROP_AUDIO_EN) == 0)
if (params->getGPValue(P_STROP_AUDIO_EN) == 0)
audio_proc = false;
int audio_rate = params->getGPValue(P_STROP_AUDIO_RATE);
int audio_channels = params->getGPValue(P_STROP_AUDIO_CHANNEL);
if(audio_proc != session->process_audio)
if (audio_proc != session->process_audio)
audio_was_changed = true;
if(audio_rate != session->audio.sample_rate)
if (audio_rate != session->audio.sample_rate)
f_audio_rate = true;
if(audio_channels != session->audio.channels)
if (audio_channels != session->audio.channels)
f_audio_channels = true;
if((audio_proc || session->process_audio) && (f_audio_rate || f_audio_channels))
if ((audio_proc || session->process_audio) && (f_audio_rate || f_audio_channels))
audio_was_changed = true;
if(apply) {
if (apply) {
bool audio_restarted = false;
if(audio_was_changed) {
if (audio_was_changed) {
session->process_audio = audio_proc;
session->audio.sample_rate = audio_rate;
session->audio.channels = audio_channels;
D2( cerr << "Audio was changed. Should restart it" << endl;)
D2(sensor_port, cerr << "Audio was changed. Should restart it" << endl);
audio_init();
audio_restarted = true;
// if audio enable was requested, check that a soundcard is really connected
if(audio_proc) {
if(!audio->present()) {
if (audio_proc) {
if (!audio->present()) {
session->process_audio = false;
changes_array[changes_array_i + 0] = P_STROP_AUDIO_EN;
changes_array[changes_array_i + 1] = 0;
changes_array_i += 2;
}
}
if(f_audio_rate) {
if (f_audio_rate) {
changes_array[changes_array_i + 0] = P_STROP_AUDIO_RATE;
changes_array[changes_array_i + 1] = session->audio.sample_rate;
changes_array_i += 2;
}
if(f_audio_channels) {
if (f_audio_channels) {
changes_array[changes_array_i + 0] = P_STROP_AUDIO_CHANNEL;
changes_array[changes_array_i + 1] = session->audio.channels;
changes_array_i += 2;
}
}
// was started before new client - must reinit audio
if(!audio_restarted && session->process_audio)
if (!audio_restarted && session->process_audio)
audio_init();
}
result = result || audio_was_changed || transport_was_changed;
// apply volume if audio is enabled, and volume was changed
if(session->process_audio) {
if(audio->present()) {
if (session->process_audio) {
if (audio->present()) {
// check volume
long volume = audio->volume();
int audio_volume = params->getGPValue(P_AUDIO_CAPTURE_VOLUME);
// and apply it
if(audio_volume != volume) {
if (audio_volume != volume) {
audio->set_volume(audio_volume);
changes_array[changes_array_i + 0] = P_AUDIO_CAPTURE_VOLUME;
changes_array[changes_array_i + 1] = audio->volume();
......@@ -318,7 +326,7 @@ D2( cerr << "Audio was changed. Should restart it" << endl;)
// update array of changes
// set frame to update
if(apply || params_update) {
if (apply || params_update) {
changes_array[0] = FRAMEPARS_SETFRAME;
changes_array[1] = params->getGPValue(G_THIS_FRAME) + 1;
params->setPValue(changes_array, changes_array_i);
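// (for reference) changes_array is a flat list of <parameter index, value> pairs:
// elements [0..1] latch the changes to a frame (FRAMEPARS_SETFRAME, frame number),
// the remaining pairs were appended above as P_* index / new value, and
// changes_array_i is the total number of elements handed to setPValue()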
......@@ -328,7 +336,7 @@ D2( cerr << "Audio was changed. Should restart it" << endl;)
// if(apply) {
// here - create new function from where update all settings
struct video_desc_t video_desc = video->get_current_desc();
if(video_desc.valid) {
if (video_desc.valid) {
session->video.width = video_desc.width;
session->video.height = video_desc.height;
session->video.fps = video_desc.fps;
......@@ -337,28 +345,29 @@ D2( cerr << "Audio was changed. Should restart it" << endl;)
session->video.type = video->ptype();
// }
if(result)
if (result)
return 1;
return 0;
}
int Streamer::handler(RTSP_Server *rtsp_server, RTSP_Server::event event) {
static bool _play = false;
D( cerr << "event: running= " << running << " ";)
switch(event) {
D(sensor_port, cerr << "event: running= " << running << " ");
switch (event) {
case RTSP_Server::DESCRIBE: /// Update frame size, fps before starting new stream (generating SDP file)
update_settings(true);
break;
case RTSP_Server::PARAMS_WAS_CHANGED: /// Update frame size, fps before starting new stream (generating SDP file)
return (update_settings(false) || !(params->daemon_enabled()));
case RTSP_Server::PLAY:
D( cerr << "==PLAY==";)
if(connected_count == 0) {
D(sensor_port, cerr << "==PLAY==");
if (connected_count == 0) {
int ttl = -1;
if(session->rtp_out.multicast)
if (session->rtp_out.multicast)
ttl = atoi(session->rtp_out.ttl.c_str());
video->Start(session->rtp_out.ip, session->rtp_out.port_video, session->video.fps_scale, ttl);
if(audio != NULL)
video->Start(session->rtp_out.ip, session->rtp_out.port_video, session->video.fps_scale,
ttl);
if (audio != NULL)
audio->Start(session->rtp_out.ip, session->rtp_out.port_audio, ttl);
}
connected_count++;
......@@ -366,11 +375,11 @@ D( cerr << "==PLAY==";)
running = true;
break;
case RTSP_Server::PAUSE:
D( cerr << "PAUSE";)
D(sensor_port, cerr << "PAUSE");
connected_count--;
if(connected_count <= 0) {
if (connected_count <= 0) {
video->Stop();
if(audio != NULL)
if (audio != NULL)
audio->Stop();
connected_count = 0;
_play = false;
......@@ -378,15 +387,15 @@ D( cerr << "PAUSE";)
}
break;
case RTSP_Server::TEARDOWN:
D( cerr << "TEARDOWN";)
if(!running) {
D( cerr << " was not running";)
D(sensor_port, cerr << "TEARDOWN");
if (!running) {
D(sensor_port, cerr << " was not running");
break;
}
connected_count--;
if(connected_count <= 0) {
if (connected_count <= 0) {
video->Stop();
if(audio != NULL)
if (audio != NULL)
audio->Stop();
connected_count = 0;
_play = false;
......@@ -394,35 +403,34 @@ D( cerr << " was not running";)
}
break;
case RTSP_Server::RESET:
D( cerr << "RESET";)
if(!running) {
D( cerr << " was not running";)
D(sensor_port, cerr << "RESET");
if (!running) {
D(sensor_port, cerr << " was not running");
break;
}
video->Stop();
if(audio != NULL)
if (audio != NULL)
audio->Stop();
connected_count = 0;
_play = false;
running = false;
break;
/*
/*
case RTSP_Server::IS_DAEMON_ENABLED:
D( cerr << "IS_DAEMON_ENABLED video->isDaemonEnabled(-1)=" << video->isDaemonEnabled(-1) << endl;)
D( cerr << "IS_DAEMON_ENABLED video->isDaemonEnabled(-1)=" << video->isDaemonEnabled(-1) << endl;)
return video->isDaemonEnabled(-1);
break;
*/
*/
default:
D( cerr << "unknown == " << event;)
D(sensor_port, cerr << "unknown == " << event);
break;
}
D( cerr << endl;)
D(sensor_port, cerr << endl);
return 0;
}
void Streamer::Main(void) {
D( cerr << "start Main for sensor port " << sensor_port << endl;)
string def_mcast = "232.1.1.1";
D(sensor_port, cerr << "start Main for sensor port " << sensor_port << endl);
int def_port = 20020;
string def_ttl = "2";
......@@ -442,18 +450,18 @@ void Streamer::Main(void) {
update_settings(true);
/// Got here if the streamer is and was enabled (may need more actions instead of just "continue")
// start RTSP server
D2( cerr << "start server" << endl;)
D2(sensor_port, cerr << "start server" << endl);
if (rtsp_server == NULL)
rtsp_server = new RTSP_Server(Streamer::f_handler, (void *) this, params, session);
rtsp_server->main();
D2( cerr << "server was stopped" << endl;)
D2( cerr << "stop video" << endl;)
D2(sensor_port, cerr << "server was stopped" << endl);
D2(sensor_port, cerr << "stop video" << endl);
video->Stop();
D2( cerr << "stop audio" << endl;)
D2(sensor_port, cerr << "stop audio" << endl);
if (audio != NULL) {
audio->Stop();
// free audio resource - other app can use soundcard
D2( cerr << "delete audio" << endl;)
D2(sensor_port, cerr << "delete audio" << endl);
delete audio;
audio = NULL;
}
......
......@@ -45,19 +45,31 @@ using namespace std;
#define VIDEO_DEBUG_3 // for FPS monitoring
#ifdef VIDEO_DEBUG
#define D(a) a
#define D(s_port, a) \
do { \
cerr << __FILE__ << ": " << __FUNCTION__ << ": " << __LINE__ << ": sensor port: " << s_port << " "; \
a; \
} while (0)
#else
#define D(a)
#define D(s_port, a)
#endif
#ifdef VIDEO_DEBUG_2
#define D2(a) a
#define D2(s_port, a) \
do { \
cerr << __FILE__ << ": " << __FUNCTION__ << ": " << __LINE__ << ": sensor port: " << s_port << " "; \
a; \
} while (0)
#else
#define D2(a)
#define D2(s_port, a)
#endif
#ifdef VIDEO_DEBUG_3
#define D3(a) a
#define D3(s_port, a) \
do { \
cerr << __FILE__ << ": " << __FUNCTION__ << ": " << __LINE__ << ": sensor port: " << s_port << " "; \
a; \
} while (0)
#else
#define D3(a)
#define D3(s_port, a)
#endif
......@@ -87,8 +99,7 @@ static const char *jhead_file_names[] = {
Video::Video(int port, Parameters *pars) {
string err_msg;
D( cerr << "Video::Video() on port " << port << endl;)
D( cerr << __FILE__<< ":"<< __FUNCTION__ << ":" <<__LINE__ << endl;)
D(sensor_port, cerr << "Video::Video() on sensor port " << port << endl);
params = pars;
sensor_port = port;
stream_name = "video";
......@@ -107,21 +118,16 @@ Video::Video(int port, Parameters *pars) {
err_msg = "can't mmap " + *circbuf_file_names[sensor_port];
throw runtime_error(err_msg);
}
cout << "<-- 1" << endl;
// buffer_ptr_s = (unsigned long *) mmap(buffer_ptr + (buffer_length >> 2), buffer_length,
// PROT_READ, MAP_FIXED | MAP_SHARED, fd_circbuf, 0); /// preventing buffer rollovers
buffer_ptr_s = (unsigned long *) mmap(buffer_ptr + (buffer_length >> 2), 100 * 4096,
PROT_READ, MAP_FIXED | MAP_SHARED, fd_circbuf, 0); /// preventing buffer rollovers
cout << "<-- 2" << endl;
if ((int) buffer_ptr_s == -1) {
err_msg = "can't create second mmap for " + *circbuf_file_names[sensor_port];
throw runtime_error(err_msg);
}
cout << "<-- 3" << endl;
/// Skip several frames if the camera has just booted
/// May get stuck here if the compressor is off; it should be enabled externally
D( cerr << __FILE__<< ":"<< __FUNCTION__ << ":" <<__LINE__ << " frame=" << params->getGPValue(G_THIS_FRAME) << " buffer_length=" << buffer_length << endl;)
D(sensor_port, cerr << " frame=" << params->getGPValue(G_THIS_FRAME) << " buffer_length=" << buffer_length << endl);
while (params->getGPValue(G_THIS_FRAME) < 10) {
lseek(fd_circbuf, LSEEK_CIRC_TOWP, SEEK_END); /// get to the end of buffer
lseek(fd_circbuf, LSEEK_CIRC_WAIT, SEEK_END); /// wait frame got ready there
......@@ -129,7 +135,7 @@ Video::Video(int port, Parameters *pars) {
/// Always wait one more time to make sure the compressor is actually running
lseek(fd_circbuf, LSEEK_CIRC_WAIT, SEEK_END);
lseek(fd_circbuf, LSEEK_CIRC_WAIT, SEEK_END);
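// (for reference) lseek() "whence" extensions used on fd_circbuf in this file:
//   LSEEK_CIRC_TOWP       - move to the write pointer, i.e. the start of the latest frame
//   LSEEK_CIRC_WAIT       - block until a complete frame is ready at the current pointer
//   LSEEK_CIRC_PREV/NEXT  - step to the previous/next frame (negative result if none)
//   LSEEK_CIRC_VALID/LAST - check the current frame pointer / jump to the last acquired frame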
D(cerr << __FILE__<< ":"<< __FUNCTION__ << ":" <<__LINE__ << " frame=" << params->getGPValue(G_THIS_FRAME) << " buffer_length=" << buffer_length <<endl;)
D(sensor_port, cerr << " frame=" << params->getGPValue(G_THIS_FRAME) << " buffer_length=" << buffer_length <<endl);
fd_jpeghead = open(jhead_file_names[sensor_port], O_RDWR);
if (fd_jpeghead < 0) {
err_msg = "can't open " + *jhead_file_names[sensor_port];
......@@ -147,7 +153,7 @@ Video::Video(int port, Parameters *pars) {
// create thread...
init_pthread((void *) this);
D( cerr << __FILE__<< ":" << __FUNCTION__ << ":" << __LINE__ << endl;)
D(sensor_port, cerr << "finish constructor" << endl);
}
Video::~Video(void) {
......@@ -169,8 +175,8 @@ Video::~Video(void) {
/// Compressor should be turned on outside of the streamer
#define TURN_COMPRESSOR_ON 0
void Video::Start(string ip, long port, int _fps_scale, int ttl) {
D( cerr << __FILE__<< ":"<< __FUNCTION__ << ":" <<__LINE__ << "_play=" << _play << endl;)
if(_play) {
D(sensor_port, cerr << "_play=" << _play << endl);
if (_play) {
cerr << "ERROR-->> wrong usage: Video()->Start() when already play!!!" << endl;
return;
}
......@@ -188,7 +194,7 @@ D( cerr << __FILE__<< ":"<< __FUNCTION__ << ":" <<__LINE__ << "_play=" << _play
// f_width = width();
// f_height = height();
fps_scale = _fps_scale;
if(fps_scale < 1)
if (fps_scale < 1)
fps_scale = 1;
fps_scale_c = 0;
/// start compressor...NOTE: Maybe it should not?
......@@ -204,7 +210,7 @@ D( cerr << __FILE__<< ":"<< __FUNCTION__ << ":" <<__LINE__ << "_play=" << _play
}
void Video::Stop(void) {
if(!_play)
if (!_play)
return;
//return;
RTP_Stream::Stop();
......@@ -226,9 +232,9 @@ bool Video::waitDaemonEnabled(int daemonBit) { // <0 - use default
unsigned long this_frame = params->getGPValue(G_THIS_FRAME);
/// No semaphores, so it is possible to miss an event and wait until the streamer is re-enabled before sending the message,
/// but that does not seem too harmful
D(cerr << " lseek(fd_circbuf" << fd_circbuf << ", LSEEK_DAEMON_CIRCBUF+lastDaemonBit, SEEK_END)... " << endl;)
D(sensor_port, cerr << " lseek(fd_circbuf" << sensor_port << ", LSEEK_DAEMON_CIRCBUF+lastDaemonBit, SEEK_END)... " << endl);
lseek(fd_circbuf, LSEEK_DAEMON_CIRCBUF + lastDaemonBit, SEEK_END); ///
D(cerr << "...done" << endl;)
D(sensor_port, cerr << "...done" << endl);
if (this_frame == params->getGPValue(G_THIS_FRAME))
return true;
......@@ -241,10 +247,10 @@ bool Video::waitDaemonEnabled(int daemonBit) { // <0 - use default
* @return (after possible waiting) true if there was no waiting, false if there was waiting
*/
bool Video::isDaemonEnabled(int daemonBit) { // <0 - use default
if((daemonBit >= 0) && (daemonBit < 32))
if ((daemonBit >= 0) && (daemonBit < 32))
lastDaemonBit = daemonBit;
// return((framePars[GLOBALPARS(G_THIS_FRAME) & PARS_FRAMES_MASK].pars[P_DAEMON_EN] & (1 << lastDaemonBit)) != 0);
return((params->getFrameValue(P_DAEMON_EN) & (1 << lastDaemonBit)) != 0);
return ((params->getFrameValue(P_DAEMON_EN) & (1 << lastDaemonBit)) != 0);
}
......@@ -260,49 +266,51 @@ long Video::getFramePars(struct interframe_params_t *frame_pars, long before, lo
long cur_pointer, p;
long this_pointer = 0;
if(ptr_before > 0) {
if (ptr_before > 0) {
/// if we need an earlier ('before') frame, set the pointer to the saved one (saved with before == 0)
this_pointer = lseek(fd_circbuf, ptr_before, SEEK_SET); /// restore the file pointer
}
if(ptr_before < 0) {
if (ptr_before < 0) {
/// otherwise, set pointer to the actual frame
this_pointer = lseek(fd_circbuf, LSEEK_CIRC_TOWP, SEEK_END); /// byte index in circbuf of the frame start
}
if(ptr_before == 0)
if (ptr_before == 0)
this_pointer = lseek(fd_circbuf, 0, SEEK_CUR); /// save original file pointer
char *char_buffer_ptr = (char *)buffer_ptr;
if(lseek(fd_circbuf, LSEEK_CIRC_VALID, SEEK_END) < 0) { /// Invalid frame - reset to the latest acquired
char *char_buffer_ptr = (char *) buffer_ptr;
if (lseek(fd_circbuf, LSEEK_CIRC_VALID, SEEK_END) < 0) { /// Invalid frame - reset to the latest acquired
this_pointer = lseek(fd_circbuf, LSEEK_CIRC_LAST, SEEK_END); /// Last acquired frame (may be not yet available if none are)
}
cur_pointer = this_pointer;
if(before == 0)
if (before == 0)
lseek(fd_circbuf, LSEEK_CIRC_WAIT, SEEK_END);
while(before && (((p = lseek(fd_circbuf, LSEEK_CIRC_PREV, SEEK_END))) >= 0)) { /// try to get earlier valid frame
while (before && (((p = lseek(fd_circbuf, LSEEK_CIRC_PREV, SEEK_END))) >= 0)) { /// try to get earlier valid frame
cur_pointer = p;
before--;
}
/// if 'before' is still >0 - not enough frames acquired, wait for more
while(before > 0) {
while (before > 0) {
lseek(fd_circbuf, this_pointer, SEEK_SET);
lseek(fd_circbuf, LSEEK_CIRC_WAIT, SEEK_END);
this_pointer = lseek(fd_circbuf, LSEEK_CIRC_NEXT, SEEK_END);
before--;
}
long metadata_start = cur_pointer - 32;
if(metadata_start < 0)
if (metadata_start < 0)
metadata_start += buffer_length;
/// copy the interframe data (timestamps are not yet there)
D(cerr << __FILE__<< ":"<< __FUNCTION__ << ":" <<__LINE__ << " before=" << before << " metadata_start=" << metadata_start << endl;)
D(sensor_port, cerr << " before=" << before << " metadata_start=" << metadata_start << endl);
memcpy(frame_pars, &char_buffer_ptr[metadata_start], 32);
long jpeg_len = frame_pars->frame_length; //! frame_pars->frame_length is now the length of bitstream
if(frame_pars->signffff != 0xffff) {
cerr << __FILE__<< ":"<< __FUNCTION__ << ":" <<__LINE__ << " Wrong signature in getFramePars() (broken frame), frame_pars->signffff="<< frame_pars->signffff << endl;
if (frame_pars->signffff != 0xffff) {
cerr << __FILE__ << ":" << __FUNCTION__ << ":" << __LINE__
<< " Wrong signature in getFramePars() (broken frame), frame_pars->signffff="
<< frame_pars->signffff << endl;
int i;
long * dd =(long *) frame_pars;
cerr << hex << (metadata_start/4) << ": ";
long * dd = (long *) frame_pars;
cerr << hex << (metadata_start / 4) << ": ";
// for (i=0;i<8;i++) {
for (i=0;i<8;i++) {
for (i = 0; i < 8; i++) {
cerr << hex << dd[i] << " ";
}
cerr << dec << endl;
......@@ -312,11 +320,11 @@ D(cerr << __FILE__<< ":"<< __FUNCTION__ << ":" <<__LINE__ << " before=" << befo
}
/// find location of the timestamp and copy it to the frame_pars structure
///==================================
long timestamp_start = (cur_pointer) + ((jpeg_len + CCAM_MMAP_META + 3) & (~0x1f)) + 32 - CCAM_MMAP_META_SEC; //! magic shift - should index first byte of the time stamp
if(timestamp_start >= buffer_length)
long timestamp_start = (cur_pointer) + ((jpeg_len + CCAM_MMAP_META + 3) & (~0x1f)) + 32 - CCAM_MMAP_META_SEC; //! magic shift - should index first byte of the time stamp
if (timestamp_start >= buffer_length)
timestamp_start -= buffer_length;
memcpy(&(frame_pars->timestamp_sec), &char_buffer_ptr[timestamp_start], 8);
if(ptr_before == 0)
if (ptr_before == 0)
lseek(fd_circbuf, this_pointer, SEEK_SET); /// restore the file pointer
//D(cerr << __FILE__<< ":"<< __FUNCTION__ << ":" <<__LINE__ << " this_pointer=" << this_pointer << " cur_pointer=" << cur_pointer << endl;)
return cur_pointer;
......@@ -332,23 +340,19 @@ struct video_desc_t Video::get_current_desc(bool with_fps) {
struct video_desc_t video_desc;
video_desc.valid = false;
long ptr = -1;
if((ptr = getFramePars(&frame_pars, FRAMEPARS_BEFORE, -1)) < 0) {
if ((ptr = getFramePars(&frame_pars, FRAMEPARS_BEFORE, -1)) < 0) {
return video_desc;
} else {
if(with_fps) {
if(getFramePars(&prev_pars, FRAMEPARS_BEFORE + 1, ptr) < 0)
if (with_fps) {
if (getFramePars(&prev_pars, FRAMEPARS_BEFORE + 1, ptr) < 0)
return video_desc;
double fps = (frame_pars.timestamp_sec - prev_pars.timestamp_sec);
fps *= 1000000.0;
fps += frame_pars.timestamp_usec;
fps -= prev_pars.timestamp_usec;
D3( float _f = fps;)
fps = 1000000.0 / fps;
video_desc.fps = (used_fps = fps);
D( cerr << __FILE__<< ":"<< __FUNCTION__ << ":" <<__LINE__ << " fps=" << fps << endl;)
// cerr << __FILE__<< ":"<< __FUNCTION__ << ":" <<__LINE__ << " fps=" << video_desc.fps << endl;
D3(if(_f == 0))
D3(cerr << "delta == " << _f << endl << endl;)
D(sensor_port, cerr << " fps=" << fps << endl);
}
}
video_desc.valid = true;
......@@ -361,7 +365,7 @@ D3(cerr << "delta == " << _f << endl << endl;)
}
void Video::fps(float fps) {
if(fps < 0.01)
if (fps < 0.01)
return;
/// currently limiting FPS only works with free running TODO: Add external trigger frequency support.
unsigned long write_data[6];
......@@ -369,12 +373,12 @@ void Video::fps(float fps) {
write_data[0] = FRAMEPARS_SETFRAME;
write_data[1] = target_frame; /// wait then for that frame to be available on the output plus 2 frames for fps to be stable
write_data[2] = P_FP1000SLIM;
write_data[3] = (unsigned long)fps * 1000;
write_data[3] = (unsigned long) fps * 1000;
write_data[4] = P_FPSFLAGS;
write_data[5] = 3;
// long rslt = write(fd_fparmsall, write_data, sizeof(write_data));
int rslt = params->write(write_data, sizeof(write_data));
if(rslt == sizeof(write_data)) { /// written OK
if (rslt == sizeof(write_data)) { /// written OK
// lseek(fd_fparmsall, LSEEK_FRAME_WAIT_ABS + target_frame + FRAMES_SKIP_FPS, SEEK_END); /// skip frames
params->lseek(LSEEK_FRAME_WAIT_ABS + target_frame + FRAMES_SKIP_FPS, SEEK_END); /// skip frames
}
......@@ -392,43 +396,44 @@ long Video::capture(void) {
int before;
/// Make sure the streamer is not disabled through the bit in P_DAEMON_EN
// if((framePars[pars->getGPValue(G_THIS_FRAME) & PARS_FRAMES_MASK].pars[P_DAEMON_EN] & (1 << lastDaemonBit)) == 0) {
if((params->getFrameValue(P_DAEMON_EN) & (1 << lastDaemonBit)) == 0) {
if ((params->getFrameValue(P_DAEMON_EN) & (1 << lastDaemonBit)) == 0) {
return -DAEMON_DISABLED; /// return exception (will stop the stream)
}
frameStartByteIndex = lseek(fd_circbuf, LSEEK_CIRC_TOWP, SEEK_END); /// byte index in circbuf of the frame start
latestAvailableFrame_ptr = frameStartByteIndex;
lseek(fd_circbuf, LSEEK_CIRC_WAIT, SEEK_END);
frame_ptr = (char *)((unsigned long)buffer_ptr + latestAvailableFrame_ptr);
frame_ptr = (char *) ((unsigned long) buffer_ptr + latestAvailableFrame_ptr);
//fprintf(stderr, "frame_ptr == %08X; ", frame_ptr);
if(latestAvailableFrame_ptr < 32)
if (latestAvailableFrame_ptr < 32)
latestAvailableFrame_ptr += buffer_length;
latestAvailableFrame_ptr >>= 2;
frame_len = buffer_ptr[latestAvailableFrame_ptr - 1];
// read timestamp
char *ts_ptr = (char *)((unsigned long)frame_ptr + (long)(((frame_len + CCAM_MMAP_META + 3) & (~0x1f)) + 32 - CCAM_MMAP_META_SEC));
char *ts_ptr = (char *) ((unsigned long) frame_ptr + (long) (((frame_len + CCAM_MMAP_META + 3) & (~0x1f)) + 32 - CCAM_MMAP_META_SEC));
unsigned long t[2];
memcpy(&t, (void *)ts_ptr, 8);
memcpy(&t, (void *) ts_ptr, 8);
f_tv.tv_sec = t[0];
f_tv.tv_usec = t[1];
// read Q value
char *meta = (char *)frame_ptr;
char *meta = (char *) frame_ptr;
meta -= 32;
if(meta < (char *)buffer_ptr)
if (meta < (char *) buffer_ptr)
meta += buffer_length;
struct interframe_params_t *fp = (struct interframe_params_t *)meta;
struct interframe_params_t *fp = (struct interframe_params_t *) meta;
/// See if the frame parameters are the same as used when starting the stream,
/// Otherwise check for up to G_SKIP_DIFF_FRAME older frames and return them instead,
/// If that number is exceeded - return exception
/// Each time the latest acquired frame is considered, so we do not need to save frmae poointer additionally
if((fp->width != used_width) || (fp->height != used_height)) {
for(before = 1; before <= (int)params->getGPValue(G_SKIP_DIFF_FRAME); before++) {
if(((frameStartByteIndex = getFramePars(&frame_pars, before))) && (frame_pars.width == used_width) && (frame_pars.height == used_height)) {
if ((fp->width != used_width) || (fp->height != used_height)) {
for (before = 1; before <= (int) params->getGPValue(G_SKIP_DIFF_FRAME); before++) {
if (((frameStartByteIndex = getFramePars(&frame_pars, before)))
&& (frame_pars.width == used_width) && (frame_pars.height == used_height)) {
/// substitute older frame instead of the latest one. Leave wrong timestamp?
/// copying code above (may need some cleanup). Maybe - just move earlier so there will be no code duplication?
latestAvailableFrame_ptr = frameStartByteIndex;
frame_ptr = (char *)((unsigned long)buffer_ptr + latestAvailableFrame_ptr);
if(latestAvailableFrame_ptr < 32)
frame_ptr = (char *) ((unsigned long) buffer_ptr + latestAvailableFrame_ptr);
if (latestAvailableFrame_ptr < 32)
latestAvailableFrame_ptr += buffer_length;
latestAvailableFrame_ptr >>= 2;
frame_len = buffer_ptr[latestAvailableFrame_ptr - 1];
......@@ -443,39 +448,39 @@ long Video::capture(void) {
//cerr << "used_width == " << used_width << "; current_width == " << frame_pars.width << endl;
/// update interframe data pointer
// char *meta = (char *)frame_ptr;
meta = (char *)frame_ptr;
meta = (char *) frame_ptr;
meta -= 32;
if(meta < (char *)buffer_ptr)
if (meta < (char *) buffer_ptr)
meta += buffer_length;
fp = (struct interframe_params_t *)meta;
fp = (struct interframe_params_t *) meta;
break;
}
}
if(before > (int) params->getGPValue(G_SKIP_DIFF_FRAME)) {
D(cerr << __FILE__<< ":"<< __FUNCTION__ << ":" <<__LINE__<< " Killing stream because of frame size change " << endl;)
if (before > (int) params->getGPValue(G_SKIP_DIFF_FRAME)) {
D(sensor_port, cerr << " Killing stream because of frame size change " << endl);
return -SIZE_CHANGE; /// It seems that frame size is changed for good, need to restart the stream
}
D(cerr << __FILE__<< ":"<< __FUNCTION__ << ":" <<__LINE__<< " Waiting for the original frame size to be restored , using " << before << " frames ago" << endl;)
D(sensor_port, cerr << " Waiting for the original frame size to be restored , using " << before << " frames ago" << endl);
}
///long Video::getFramePars(struct interframe_params_t * frame_pars, long before) {
///getGPValue(unsigned long GPNumber)
quality = fp->quality2;
if(qtables_include && quality != f_quality) {
D(cerr << __FILE__<< ":"<< __FUNCTION__ << ":" <<__LINE__<< " Updating quality tables, new quality is " << quality << endl;)
if (qtables_include && quality != f_quality) {
D(sensor_port, cerr << " Updating quality tables, new quality is " << quality << endl);
lseek(fd_jpeghead, frameStartByteIndex | 2, SEEK_END); /// '| 2' indicates that we need just the quantization tables, not the full JPEG header
read(fd_jpeghead, (void *)&qtable[0], 128);
read(fd_jpeghead, (void *) &qtable[0], 128);
}
f_quality = quality;
/*
/*
// check statistic
static bool first = true;
if(first) {
static bool first = true;
if(first) {
struct timeval tv;
gettimeofday(&tv, NULL);
first = false;
fprintf(stderr, "VIDEO first with time: %d:%06d at: %d:%06d\n", f_tv.tv_sec, f_tv.tv_usec, tv.tv_sec, tv.tv_usec);
}
*/
}
*/
return frame_len;
}
......@@ -485,32 +490,32 @@ long Video::process(void) {
int to_send = _plen;
int _qtables_len = 128 + 4;
long frame_len = capture();
if(frame_len == 0) {
if (frame_len == 0) {
//D(cerr << "[";)
// return false;
return 0; /// now never here
} else {
if(frame_len < 0) {
D(cerr << __FILE__<< ":"<< __FUNCTION__ << ":" <<__LINE__<< "capture returned negative" << frame_len << endl;)
if (frame_len < 0) {
D(sensor_port, cerr << "capture returned negative" << frame_len << endl);
// return false;
return frame_len; /// attention (restart) is needed
}
}
// check FPS decimation
bool to_skip = true;
if(fps_scale_c == 0)
if (fps_scale_c == 0)
to_skip = false;
fps_scale_c++;
if(fps_scale_c >= fps_scale)
if (fps_scale_c >= fps_scale)
fps_scale_c = 0;
//cerr << "fps_scale == " << fps_scale << "; fps_scale_c == " << fps_scale_c << "; to_skip == " << to_skip << endl;
if(to_skip)
if (to_skip)
return 1;
int to_send_len = frame_len;
unsigned char h[20 + 4];
int packet_len = 0;
unsigned char *data = (unsigned char *)frame_ptr;
unsigned char *data = (unsigned char *) frame_ptr;
uint64_t t = f_tv.tv_sec;
t *= 90000;
......@@ -519,22 +524,22 @@ D(cerr << __FILE__<< ":"<< __FUNCTION__ << ":" <<__LINE__<< "capture returned ne
double f = f_tv.tv_usec;
f /= 1000000.0;
f *= 90000.0;
timestamp += (uint32_t)f;
timestamp += (uint32_t) f;
uint32_t ts;
ts = timestamp;
ts = htonl(ts);
long offset = 0;
void *v_ptr[4];
int v_len[4] = {0, 0, 0, 0};
int v_len[4] = { 0, 0, 0, 0 };
bool first = true;
while(to_send_len && _play) {
while (to_send_len && _play) {
unsigned long pnum = htons(packet_num);
bool last = false;
to_send = _plen;
if(qtables_include && first)
if (qtables_include && first)
to_send = _plen - _qtables_len;
if(to_send_len <= to_send) {
if (to_send_len <= to_send) {
packet_len = to_send_len;
to_send_len = 0;
last = true;
......@@ -544,41 +549,41 @@ D(cerr << __FILE__<< ":"<< __FUNCTION__ << ":" <<__LINE__<< "capture returned ne
}
// make RTP packet
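// (for reference) layout assembled into h[] below, following RTP/JPEG (RFC 2435):
//   h[0]      RTP version 2 (0x80)
//   h[1]      payload type, marker bit set on the last packet of a frame
//   h[2..3]   sequence number, h[4..7] 90 kHz timestamp, h[8..11] SSRC
//   h[12]     type-specific (0), h[13..15] 24-bit fragment offset
//   h[16]     JPEG type, h[17] Q value (+128 when quantization tables are inlined)
//   h[18..19] width/8 and height/8
//   h[20..23] quantization table header (MBZ, precision, length) sent with the first packet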
h[0] = 0x80;
if(!last)
if (!last)
h[1] = _ptype;
else
h[1] = 0x80 + _ptype;
memcpy((void *)&h[2], (void *)&pnum, 2);
memcpy((void *)&h[4], (void *)&ts, 4);
memcpy((void *)&h[8], (void *)&SSRC, 4);
memcpy((void *) &h[2], (void *) &pnum, 2);
memcpy((void *) &h[4], (void *) &ts, 4);
memcpy((void *) &h[8], (void *) &SSRC, 4);
// make MJPEG header
unsigned long off = htonl(offset);
memcpy((void *)&h[12], (void *)&off, 4);
memcpy((void *) &h[12], (void *) &off, 4);
h[12] = 0x00;
h[16] = 0x01;
unsigned int q = f_quality;
if(qtables_include)
if (qtables_include)
q += 128;
h[17] = (unsigned char)(q & 0xFF);
if(f_width <= 2040)
h[17] = (unsigned char) (q & 0xFF);
if (f_width <= 2040)
h[18] = (f_width / 8) & 0xFF;
else
h[18] = 0;
if(f_height <= 2040)
if (f_height <= 2040)
h[19] = (f_height / 8) & 0xFF;
else
h[19] = 0;
h[20] = 0;
h[21] = 0;
unsigned short l = htons(128);
memcpy((void *)&h[22], (void *)&l, 2);
memcpy((void *) &h[22], (void *) &l, 2);
// update RTCP statistic
rtp_packets++;
rtp_octets += packet_len + 8; // data + MJPEG header
// send vector
if(first) {
if (first) {
v_ptr[0] = h;
if(qtables_include) {
if (qtables_include) {
v_len[0] = 24;
v_ptr[1] = qtable;
v_len[1] = 128;
......