Commit e7b3122e authored by Mikhail Karpenko

Update debug macros for streamer and video classes

parent 99103819
@@ -38,11 +38,17 @@ using namespace std;
  * @return None
  */
 void clean_up(pthread_t *threads, size_t sz) {
-    for (size_t i = 0; i < sz; i++)
-        pthread_cancel(threads[i]);
+    int ret_val;
+
+    for (size_t i = 0; i < sz; i++) {
+        ret_val = pthread_cancel(threads[i]);
+        if (!ret_val)
+            cout << "pthread_cancel returned " << ret_val << ", sensor port " << i << endl;
+    }
 }
 
 int main(int argc, char *argv[]) {
+    int ret_val;
     string opt;
     map<string, string> args;
     pthread_t threads[SENSOR_PORTS];
@@ -72,8 +78,10 @@ int main(int argc, char *argv[]) {
         streamers[i] = new Streamer(args, i);
         pthread_attr_init(&attr);
-        if (!pthread_create(&threads[i], &attr, Streamer::pthread_f, (void *) streamers[i])) {
-            cerr << "Can not spawn streamer thread for port " << i << endl;
+        ret_val = pthread_create(&threads[i], &attr, Streamer::pthread_f, (void *) streamers[i]);
+        if (ret_val != 0) {
+            cerr << "Can not spawn streamer thread for port " << i;
+            cerr << ", pthread_create returned " << ret_val << endl;
             clean_up(threads, SENSOR_PORTS);
             exit(EXIT_FAILURE);
         }
......
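For reference, the pthreads API reports failures through its return value (an errno-style code) rather than by setting errno, which is the convention the two hunks above move to. A minimal, self-contained sketch of that pattern; the worker function and messages are illustrative and not part of the streamer code:

    #include <cstring>
    #include <iostream>
    #include <pthread.h>

    // Placeholder thread body, used only to illustrate the call convention.
    static void *worker(void *arg) {
        (void) arg;
        return nullptr;
    }

    int main() {
        pthread_t thread;
        int ret_val = pthread_create(&thread, nullptr, worker, nullptr);
        if (ret_val != 0) {
            // pthread_create returns the error code directly; 0 means success.
            std::cerr << "pthread_create failed: " << std::strerror(ret_val) << std::endl;
            return 1;
        }
        pthread_join(thread, nullptr);
        return 0;
    }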
@@ -47,6 +47,7 @@ public:
     off_t lseek(off_t offset, int whence) { return ::lseek(fd_fparmsall, offset, whence); }
     bool daemon_enabled(void);
     void setPValue(unsigned long *val_array, int count);
+    inline int get_port_num() const {return sensor_port;}
 protected:
     // static Parameters *_parameters;
......
@@ -37,15 +37,23 @@ using namespace std;
 #define RTSP_DEBUG_2
 
 #ifdef RTSP_DEBUG
-#define D(a) a
+#define D(s_port, a) \
+    do { \
+        cerr << __FILE__ << ": " << __FUNCTION__ << ": " << __LINE__ << ": sensor port: " << s_port << " "; \
+        a; \
+    } while (0)
 #else
-#define D(a)
+#define D(s_port, a)
 #endif
 
 #ifdef RTSP_DEBUG_2
-#define D2(a) a
+#define D2(s_port, a) \
+    do { \
+        cerr << __FILE__ << ": " << __FUNCTION__ << ": " << __LINE__ << ": sensor port: " << s_port << " "; \
+        a; \
+    } while (0)
 #else
-#define D2(a)
+#define D2(s_port, a)
 #endif
 
 //Streamer *Streamer::_streamer = NULL;
@@ -65,12 +73,12 @@ Streamer::Streamer(const map<string, string> &_args, int port_num) {
     session->video.fps_scale = 1;
     audio_init();
     video = new Video(sensor_port, params);
-    if(opt_present("f")) {
+    if (opt_present("f")) {
         float fps = 0;
         fps = atof(args["f"].c_str());
-        if(fps < 0.1)
+        if (fps < 0.1)
             fps = 0;
-D( cout << "use fps: " << fps << endl;)
+        D(sensor_port, cout << "use fps: " << fps << endl);
         video->fps(fps);
     }
     rtsp_server = NULL;
@@ -79,10 +87,10 @@ D( cout << "use fps: " << fps << endl;)
 
 void Streamer::audio_init(void) {
     if (audio != NULL) {
-D(cerr << "delete audio" << endl;)
+        D(sensor_port, cerr << "delete audio" << endl);
         delete audio;
     }
-D(cout << "audio_enabled == " << session->process_audio << endl;)
+    D(sensor_port, cout << "audio_enabled == " << session->process_audio << endl);
     audio = new Audio(session->process_audio, params, session->audio.sample_rate, session->audio.channels);
     if (audio->present() && session->process_audio) {
         session->process_audio = true;
@@ -109,7 +117,7 @@ int Streamer::f_handler(void *ptr, RTSP_Server *rtsp_server, RTSP_Server::event
 }
 
 int Streamer::update_settings(bool apply) {
-D( cerr << "update_settings" << endl;)
+    D(sensor_port, cerr << "update_settings" << endl);
     // check settings, normalize its, return 1 if was changed
     // update settings at application if apply = 1 and parameters change isn't on-fly safe, update parameters always
@@ -130,31 +138,31 @@ D( cerr << "update_settings" << endl;)
     //----------------
     // frame skip, or FPS scale
     int frames_skip = params->getGPValue(P_STROP_FRAMES_SKIP);
-    if(frames_skip < 0 || frames_skip > 0xFFFF) {
-        if(frames_skip < 0)
+    if (frames_skip < 0 || frames_skip > 0xFFFF) {
+        if (frames_skip < 0)
             frames_skip = 0;
-        if(frames_skip < 0xFFFF)
+        if (frames_skip < 0xFFFF)
             frames_skip = 0xFFFF;
         changes_array[changes_array_i + 0] = P_STROP_FRAMES_SKIP;
-        changes_array[changes_array_i + 1] = (unsigned long)frames_skip;
+        changes_array[changes_array_i + 1] = (unsigned long) frames_skip;
         changes_array_i += 2;
         params_update = true;
     }
     frames_skip += 1; // convert to fps_scale format;
-    if(frames_skip != session->video.fps_scale) {
-        if(apply)
+    if (frames_skip != session->video.fps_scale) {
+        if (apply)
             session->video.fps_scale = frames_skip;
         //cerr << "session->video.fps_scale = " << session->video.fps_scale << endl;
         result = true;
     }
 
     //----------------
     // transport parameters
     bool transport_was_changed = false;
     bool param_multicast = params->getGPValue(P_STROP_MCAST_EN);
-    if(param_multicast || session->rtp_out.multicast) {
+    if (param_multicast || session->rtp_out.multicast) {
         // multicast/unicast
-        if(param_multicast != session->rtp_out.multicast) {
-            if(apply)
+        if (param_multicast != session->rtp_out.multicast) {
+            if (apply)
                 session->rtp_out.multicast = param_multicast;
             transport_was_changed = true;
         }
@@ -162,31 +170,31 @@ D( cerr << "update_settings" << endl;)
         unsigned long ip = params->getGPValue(P_STROP_MCAST_IP);
         bool ip_was_changed = false;
         // switch custom/default IP
-        if((ip == 0) && session->rtp_out.ip_custom)
+        if ((ip == 0) && session->rtp_out.ip_custom)
            ip_was_changed = true;
-        if((ip != 0) && !session->rtp_out.ip_custom)
+        if ((ip != 0) && !session->rtp_out.ip_custom)
            ip_was_changed = true;
         // change of custom IP
-        if((ip != 0) && session->rtp_out.ip_custom)
-            if(ip != session->rtp_out.ip_cached)
+        if ((ip != 0) && session->rtp_out.ip_custom)
+            if (ip != session->rtp_out.ip_cached)
                ip_was_changed = true;
-        if(ip_was_changed) {
-            if(ip != 0) {
+        if (ip_was_changed) {
+            if (ip != 0) {
                 struct in_addr a;
                 uint32_t a_min = ntohl(inet_addr("224.0.0.0"));
                 uint32_t a_max = ntohl(inet_addr("239.255.255.255"));
-                if(a_min > a_max) {
+                if (a_min > a_max) {
                     uint32_t a = a_min;
                     a_min = a_max;
                     a_max = a;
                 }
-                if(ip < a_min)
+                if (ip < a_min)
                     ip = a_min;
-                if(ip > a_max)
+                if (ip > a_max)
                     ip = a_max;
                 a.s_addr = htonl(ip);
-D( cerr << "multicast ip asked: " << inet_ntoa(a) << endl;)
-                if(apply) {
+                D(sensor_port, cerr << "multicast ip asked: " << inet_ntoa(a) << endl);
+                if (apply) {
                     session->rtp_out.ip_cached = ip;
                     session->rtp_out.ip_custom = true;
                     session->rtp_out.ip = inet_ntoa(a);
@@ -196,24 +204,24 @@ D( cerr << "multicast ip asked: " << inet_ntoa(a) << endl;)
                 }
             } else {
                 struct in_addr a = Socket::mcast_from_local();
-D( cerr << "multicast ip generated: " << inet_ntoa(a) << endl;)
-                if(apply) {
+                D(sensor_port, cerr << "multicast ip generated: " << inet_ntoa(a) << endl);
+                if (apply) {
                     session->rtp_out.ip_custom = false;
                     session->rtp_out.ip = inet_ntoa(a);
                 }
             }
             transport_was_changed = true;
         }
-D( if(apply))
-D( cerr << "actual multicast IP: " << session->rtp_out.ip << endl;)
+        //D( if(apply))
+        D(sensor_port, if (apply) cerr << "actual multicast IP: " << session->rtp_out.ip << endl);
         // port
         int port = params->getGPValue(P_STROP_MCAST_PORT);
-        if(port != session->rtp_out.port_video) {
-            if(port < 1024)
+        if (port != session->rtp_out.port_video) {
+            if (port < 1024)
                 port = 1024;
-            if(port > 65532)
+            if (port > 65532)
                 port = 65532;
-            if(apply) {
+            if (apply) {
                 session->rtp_out.port_video = port;
                 session->rtp_out.port_audio = port + 2;
                 changes_array[changes_array_i + 0] = P_STROP_MCAST_PORT;
@@ -224,12 +232,12 @@ D( cerr << "actual multicast IP: " << session->rtp_out.ip << endl;)
         }
         // ttl
         int ttl = params->getGPValue(P_STROP_MCAST_TTL);
-        if(ttl != atoi(session->rtp_out.ttl.c_str())) {
-            if(ttl < 1)
+        if (ttl != atoi(session->rtp_out.ttl.c_str())) {
+            if (ttl < 1)
                 ttl = 1;
-            if(ttl > 15)
+            if (ttl > 15)
                 ttl = 15;
-            if(apply) {
+            if (apply) {
                 char buf[8];
                 sprintf(buf, "%d", ttl);
                 session->rtp_out.ttl = buf;
@@ -240,7 +248,7 @@ D( cerr << "actual multicast IP: " << session->rtp_out.ip << endl;)
                 transport_was_changed = true;
             }
         }
-    if(transport_was_changed)
+    if (transport_was_changed)
         params_update = true;
 
     //-----------------
@@ -250,63 +258,63 @@ D( cerr << "actual multicast IP: " << session->rtp_out.ip << endl;)
     bool f_audio_rate = false;
     bool f_audio_channels = false;
     // - enabled/disabled
-    if(params->getGPValue(P_STROP_AUDIO_EN) == 0)
+    if (params->getGPValue(P_STROP_AUDIO_EN) == 0)
         audio_proc = false;
     int audio_rate = params->getGPValue(P_STROP_AUDIO_RATE);
     int audio_channels = params->getGPValue(P_STROP_AUDIO_CHANNEL);
-    if(audio_proc != session->process_audio)
+    if (audio_proc != session->process_audio)
         audio_was_changed = true;
-    if(audio_rate != session->audio.sample_rate)
+    if (audio_rate != session->audio.sample_rate)
         f_audio_rate = true;
-    if(audio_channels != session->audio.channels)
+    if (audio_channels != session->audio.channels)
         f_audio_channels = true;
-    if((audio_proc || session->process_audio) && (f_audio_rate || f_audio_channels))
+    if ((audio_proc || session->process_audio) && (f_audio_rate || f_audio_channels))
         audio_was_changed = true;
-    if(apply) {
+    if (apply) {
         bool audio_restarted = false;
-        if(audio_was_changed) {
+        if (audio_was_changed) {
             session->process_audio = audio_proc;
             session->audio.sample_rate = audio_rate;
             session->audio.channels = audio_channels;
-D2( cerr << "Audio was changed. Should restart it" << endl;)
+            D2(sensor_port, cerr << "Audio was changed. Should restart it" << endl);
             audio_init();
             audio_restarted = true;
             // if audio enable was asked, check what soundcard really is connected
-            if(audio_proc) {
-                if(!audio->present()) {
+            if (audio_proc) {
+                if (!audio->present()) {
                     session->process_audio = false;
                     changes_array[changes_array_i + 0] = P_STROP_AUDIO_EN;
                     changes_array[changes_array_i + 1] = 0;
                     changes_array_i += 2;
                 }
             }
-            if(f_audio_rate) {
+            if (f_audio_rate) {
                 changes_array[changes_array_i + 0] = P_STROP_AUDIO_RATE;
                 changes_array[changes_array_i + 1] = session->audio.sample_rate;
                 changes_array_i += 2;
             }
-            if(f_audio_channels) {
+            if (f_audio_channels) {
                 changes_array[changes_array_i + 0] = P_STROP_AUDIO_CHANNEL;
                 changes_array[changes_array_i + 1] = session->audio.channels;
                 changes_array_i += 2;
             }
         }
         // was started before new client - must reinit audio
-        if(!audio_restarted && session->process_audio)
+        if (!audio_restarted && session->process_audio)
             audio_init();
     }
     result = result || audio_was_changed || transport_was_changed;
     // apply volume if audio is enabled, and volume was changed
-    if(session->process_audio) {
-        if(audio->present()) {
+    if (session->process_audio) {
+        if (audio->present()) {
             // check volume
             long volume = audio->volume();
             int audio_volume = params->getGPValue(P_AUDIO_CAPTURE_VOLUME);
             // and apply it
-            if(audio_volume != volume) {
+            if (audio_volume != volume) {
                 audio->set_volume(audio_volume);
                 changes_array[changes_array_i + 0] = P_AUDIO_CAPTURE_VOLUME;
                 changes_array[changes_array_i + 1] = audio->volume();
@@ -318,7 +326,7 @@ D2( cerr << "Audio was changed. Should restart it" << endl;)
     // update array of changes
     // set frame to update
-    if(apply || params_update) {
+    if (apply || params_update) {
         changes_array[0] = FRAMEPARS_SETFRAME;
         changes_array[1] = params->getGPValue(G_THIS_FRAME) + 1;
         params->setPValue(changes_array, changes_array_i);
@@ -326,39 +334,40 @@ D2( cerr << "Audio was changed. Should restart it" << endl;)
     //------------------------------
     // update current image settings
     // if(apply) {
         // here - create new function from where update all settings
         struct video_desc_t video_desc = video->get_current_desc();
-        if(video_desc.valid) {
+        if (video_desc.valid) {
             session->video.width = video_desc.width;
             session->video.height = video_desc.height;
             session->video.fps = video_desc.fps;
             session->video.fps /= session->video.fps_scale;
         }
         session->video.type = video->ptype();
     // }
-    if(result)
+    if (result)
         return 1;
     return 0;
 }
 
 int Streamer::handler(RTSP_Server *rtsp_server, RTSP_Server::event event) {
     static bool _play = false;
-D( cerr << "event: running= " << running << " ";)
-    switch(event) {
+    D(sensor_port, cerr << "event: running= " << running << " ");
+    switch (event) {
     case RTSP_Server::DESCRIBE: /// Update frame size, fps before starting new stream (generating SDP file)
         update_settings(true);
         break;
     case RTSP_Server::PARAMS_WAS_CHANGED: /// Update frame size, fps before starting new stream (generating SDP file)
         return (update_settings(false) || !(params->daemon_enabled()));
     case RTSP_Server::PLAY:
-D( cerr << "==PLAY==";)
-        if(connected_count == 0) {
+        D(sensor_port, cerr << "==PLAY==");
+        if (connected_count == 0) {
             int ttl = -1;
-            if(session->rtp_out.multicast)
+            if (session->rtp_out.multicast)
                 ttl = atoi(session->rtp_out.ttl.c_str());
-            video->Start(session->rtp_out.ip, session->rtp_out.port_video, session->video.fps_scale, ttl);
-            if(audio != NULL)
+            video->Start(session->rtp_out.ip, session->rtp_out.port_video, session->video.fps_scale,
+                    ttl);
+            if (audio != NULL)
                 audio->Start(session->rtp_out.ip, session->rtp_out.port_audio, ttl);
         }
         connected_count++;
@@ -366,11 +375,11 @@ D( cerr << "==PLAY==";)
         running = true;
         break;
     case RTSP_Server::PAUSE:
-D( cerr << "PAUSE";)
+        D(sensor_port, cerr << "PAUSE");
         connected_count--;
-        if(connected_count <= 0) {
+        if (connected_count <= 0) {
             video->Stop();
-            if(audio != NULL)
+            if (audio != NULL)
                 audio->Stop();
             connected_count = 0;
             _play = false;
@@ -378,15 +387,15 @@ D( cerr << "PAUSE";)
         }
         break;
     case RTSP_Server::TEARDOWN:
-D( cerr << "TEARDOWN";)
-        if(!running) {
-D( cerr << " was not running";)
+        D(sensor_port, cerr << "TEARDOWN");
+        if (!running) {
+            D(sensor_port, cerr << " was not running");
             break;
         }
         connected_count--;
-        if(connected_count <= 0) {
+        if (connected_count <= 0) {
             video->Stop();
-            if(audio != NULL)
+            if (audio != NULL)
                 audio->Stop();
             connected_count = 0;
             _play = false;
@@ -394,35 +403,34 @@ D( cerr << " was not running";)
         }
         break;
     case RTSP_Server::RESET:
-D( cerr << "RESET";)
-        if(!running) {
-D( cerr << " was not running";)
+        D(sensor_port, cerr << "RESET");
+        if (!running) {
+            D(sensor_port, cerr << " was not running");
             break;
         }
         video->Stop();
-        if(audio != NULL)
+        if (audio != NULL)
             audio->Stop();
         connected_count = 0;
         _play = false;
         running = false;
         break;
 /*
     case RTSP_Server::IS_DAEMON_ENABLED:
 D( cerr << "IS_DAEMON_ENABLED video->isDaemonEnabled(-1)=" << video->isDaemonEnabled(-1) << endl;)
         return video->isDaemonEnabled(-1);
         break;
 */
     default:
-D( cerr << "unknown == " << event;)
+        D(sensor_port, cerr << "unknown == " << event);
         break;
     }
-D( cerr << endl;)
+    D(sensor_port, cerr << endl);
     return 0;
 }
 
 void Streamer::Main(void) {
-D( cerr << "start Main for sensor port " << sensor_port << endl;)
-    string def_mcast = "232.1.1.1";
+    D(sensor_port, cerr << "start Main for sensor port " << sensor_port << endl);
     int def_port = 20020;
     string def_ttl = "2";
@@ -442,18 +450,18 @@ void Streamer::Main(void) {
         update_settings(true);
         /// Got here if is and was enabled (may use more actions instead of just "continue"
         // start RTSP server
-D2( cerr << "start server" << endl;)
+        D2(sensor_port, cerr << "start server" << endl);
         if (rtsp_server == NULL)
             rtsp_server = new RTSP_Server(Streamer::f_handler, (void *) this, params, session);
         rtsp_server->main();
-D2( cerr << "server was stopped" << endl;)
-D2( cerr << "stop video" << endl;)
+        D2(sensor_port, cerr << "server was stopped" << endl);
+        D2(sensor_port, cerr << "stop video" << endl);
         video->Stop();
-D2( cerr << "stop audio" << endl;)
+        D2(sensor_port, cerr << "stop audio" << endl);
         if (audio != NULL) {
             audio->Stop();
             // free audio resource - other app can use soundcard
-D2( cerr << "delete audio" << endl;)
+            D2(sensor_port, cerr << "delete audio" << endl);
             delete audio;
             audio = NULL;
         }
......
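The reworked D()/D2() macros in the streamer hunks above wrap their body in do { ... } while (0) so that an invocation expands to a single statement and stays safe inside unbraced if/else branches, while the disabled variants compile the tracing away. A stripped-down, self-contained sketch of the same shape; the macro guard and variable values here are illustrative, not the streamer's actual header:

    #include <iostream>

    #define MY_DEBUG    // comment out to disable tracing at compile time

    #ifdef MY_DEBUG
    #define D(s_port, a) \
        do { \
            std::cerr << __FILE__ << ": " << __FUNCTION__ << ": " << __LINE__ \
                      << ": sensor port: " << (s_port) << " "; \
            a; \
        } while (0)
    #else
    #define D(s_port, a)    // expands to nothing, so D(...); becomes an empty statement
    #endif

    int main() {
        int sensor_port = 0;    // illustrative value
        if (sensor_port == 0)
            D(sensor_port, std::cerr << "starting stream" << std::endl);    // single statement: safe without braces
        else
            D(sensor_port, std::cerr << "other sensor port" << std::endl);
        return 0;
    }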
@@ -45,19 +45,31 @@ using namespace std;
 #define VIDEO_DEBUG_3 // for FPS monitoring
 
 #ifdef VIDEO_DEBUG
-#define D(a) a
+#define D(s_port, a) \
+    do { \
+        cerr << __FILE__ << ": " << __FUNCTION__ << ": " << __LINE__ << ": sensor port: " << s_port << " "; \
+        a; \
+    } while (0)
 #else
 #define D(a)
 #endif
 
 #ifdef VIDEO_DEBUG_2
-#define D2(a) a
+#define D2(s_port, a) \
+    do { \
+        cerr << __FILE__ << ": " << __FUNCTION__ << ": " << __LINE__ << ": sensor port: " << s_port << " "; \
+        a; \
+    } while (0)
 #else
 #define D2(a)
 #endif
 
 #ifdef VIDEO_DEBUG_3
-#define D3(a) a
+#define D3(s_port, a) \
+    do { \
+        cerr << __FILE__ << ": " << __FUNCTION__ << ": " << __LINE__ << ": sensor port: " << s_port << " "; \
+        a; \
+    } while (0)
 #else
 #define D3(a)
 #endif
@@ -87,8 +99,7 @@ static const char *jhead_file_names[] = {
 Video::Video(int port, Parameters *pars) {
     string err_msg;
 
-D( cerr << "Video::Video() on port " << port << endl;)
-D( cerr << __FILE__<< ":"<< __FUNCTION__ << ":" <<__LINE__ << endl;)
+    D(sensor_port, cerr << "Video::Video() on sensor port " << port << endl);
     params = pars;
     sensor_port = port;
     stream_name = "video";
@@ -107,21 +118,16 @@ Video::Video(int port, Parameters *pars) {
         err_msg = "can't mmap " + *circbuf_file_names[sensor_port];
         throw runtime_error(err_msg);
     }
-    cout << "<-- 1" << endl;
-//    buffer_ptr_s = (unsigned long *) mmap(buffer_ptr + (buffer_length >> 2), buffer_length,
-//            PROT_READ, MAP_FIXED | MAP_SHARED, fd_circbuf, 0); /// preventing buffer rollovers
     buffer_ptr_s = (unsigned long *) mmap(buffer_ptr + (buffer_length >> 2), 100 * 4096,
             PROT_READ, MAP_FIXED | MAP_SHARED, fd_circbuf, 0); /// preventing buffer rollovers
-    cout << "<-- 2" << endl;
     if ((int) buffer_ptr_s == -1) {
         err_msg = "can't create second mmap for " + *circbuf_file_names[sensor_port];
         throw runtime_error(err_msg);
     }
-    cout << "<-- 3" << endl;
 
     /// Skip several frames if it is just booted
     /// May get stuck here if compressor is off, it should be enabled externally
-D( cerr << __FILE__<< ":"<< __FUNCTION__ << ":" <<__LINE__ << " frame=" << params->getGPValue(G_THIS_FRAME) << " buffer_length=" << buffer_length << endl;)
+    D(sensor_port, cerr << " frame=" << params->getGPValue(G_THIS_FRAME) << " buffer_length=" << buffer_length << endl);
     while (params->getGPValue(G_THIS_FRAME) < 10) {
         lseek(fd_circbuf, LSEEK_CIRC_TOWP, SEEK_END); /// get to the end of buffer
         lseek(fd_circbuf, LSEEK_CIRC_WAIT, SEEK_END); /// wait frame got ready there
@@ -129,7 +135,7 @@ Video::Video(int port, Parameters *pars) {
     /// One more wait always to make sure compressor is actually running
     lseek(fd_circbuf, LSEEK_CIRC_WAIT, SEEK_END);
     lseek(fd_circbuf, LSEEK_CIRC_WAIT, SEEK_END);
-D(cerr << __FILE__<< ":"<< __FUNCTION__ << ":" <<__LINE__ << " frame=" << params->getGPValue(G_THIS_FRAME) << " buffer_length=" << buffer_length <<endl;)
+    D(sensor_port, cerr << " frame=" << params->getGPValue(G_THIS_FRAME) << " buffer_length=" << buffer_length <<endl);
     fd_jpeghead = open(jhead_file_names[sensor_port], O_RDWR);
     if (fd_jpeghead < 0) {
         err_msg = "can't open " + *jhead_file_names[sensor_port];
@@ -147,7 +153,7 @@ Video::Video(int port, Parameters *pars) {
     // create thread...
     init_pthread((void *) this);
-D( cerr << __FILE__<< ":" << __FUNCTION__ << ":" << __LINE__ << endl;)
+    D(sensor_port, cerr << "finish constructor" << endl);
 }
 
 Video::~Video(void) {
@@ -169,8 +175,8 @@ Video::~Video(void) {
 /// Compressor should be turned on outside of the streamer
 #define TURN_COMPRESSOR_ON 0
 void Video::Start(string ip, long port, int _fps_scale, int ttl) {
-D( cerr << __FILE__<< ":"<< __FUNCTION__ << ":" <<__LINE__ << "_play=" << _play << endl;)
-    if(_play) {
+    D(sensor_port, cerr << "_play=" << _play << endl);
+    if (_play) {
         cerr << "ERROR-->> wrong usage: Video()->Start() when already play!!!" << endl;
         return;
     }
@@ -188,7 +194,7 @@ D( cerr << __FILE__<< ":"<< __FUNCTION__ << ":" <<__LINE__ << "_play=" << _play
 //    f_width = width();
 //    f_height = height();
     fps_scale = _fps_scale;
-    if(fps_scale < 1)
+    if (fps_scale < 1)
         fps_scale = 1;
     fps_scale_c = 0;
     /// start compressor...NOTE: Maybe it should not?
@@ -204,11 +210,11 @@ D( cerr << __FILE__<< ":"<< __FUNCTION__ << ":" <<__LINE__ << "_play=" << _play
 }
 
 void Video::Stop(void) {
-    if(!_play)
+    if (!_play)
         return;
     //return;
     RTP_Stream::Stop();
     _play = false;
     // destroy udp socket
     prev_jpeg_wp = 0;
 }
@@ -226,9 +232,9 @@ bool Video::waitDaemonEnabled(int daemonBit) { // <0 - use default
     unsigned long this_frame = params->getGPValue(G_THIS_FRAME);
     /// No semaphors, so it is possible to miss event and wait until the streamer will be re-enabled before sending message,
     /// but it seems not so terrible
-D(cerr << " lseek(fd_circbuf" << fd_circbuf << ", LSEEK_DAEMON_CIRCBUF+lastDaemonBit, SEEK_END)... " << endl;)
+    D(sensor_port, cerr << " lseek(fd_circbuf" << sensor_port << ", LSEEK_DAEMON_CIRCBUF+lastDaemonBit, SEEK_END)... " << endl);
     lseek(fd_circbuf, LSEEK_DAEMON_CIRCBUF + lastDaemonBit, SEEK_END); ///
-D(cerr << "...done" << endl;)
+    D(sensor_port, cerr << "...done" << endl);
     if (this_frame == params->getGPValue(G_THIS_FRAME))
         return true;
@@ -241,10 +247,10 @@ bool Video::waitDaemonEnabled(int daemonBit) { // <0 - use default
  * @return (after possible waiting) true if there was no waiting, false if there was waiting
  */
 bool Video::isDaemonEnabled(int daemonBit) { // <0 - use default
-    if((daemonBit >= 0) && (daemonBit < 32))
+    if ((daemonBit >= 0) && (daemonBit < 32))
         lastDaemonBit = daemonBit;
 //    return((framePars[GLOBALPARS(G_THIS_FRAME) & PARS_FRAMES_MASK].pars[P_DAEMON_EN] & (1 << lastDaemonBit)) != 0);
-    return((params->getFrameValue(P_DAEMON_EN) & (1 << lastDaemonBit)) != 0);
+    return ((params->getFrameValue(P_DAEMON_EN) & (1 << lastDaemonBit)) != 0);
 }
@@ -260,63 +266,65 @@ long Video::getFramePars(struct interframe_params_t *frame_pars, long before, lo
     long cur_pointer, p;
     long this_pointer = 0;
-    if(ptr_before > 0) {
+    if (ptr_before > 0) {
         /// if we need some before frame, we should set pointer to saved one (saved with before == 0)
         this_pointer = lseek(fd_circbuf, ptr_before, SEEK_SET); /// restore the file pointer
     }
-    if(ptr_before < 0) {
+    if (ptr_before < 0) {
         /// otherwise, set pointer to the actual frame
         this_pointer = lseek(fd_circbuf, LSEEK_CIRC_TOWP, SEEK_END); /// byte index in circbuf of the frame start
     }
-    if(ptr_before == 0)
+    if (ptr_before == 0)
         this_pointer = lseek(fd_circbuf, 0, SEEK_CUR); /// save orifinal file pointer
-    char *char_buffer_ptr = (char *)buffer_ptr;
-    if(lseek(fd_circbuf, LSEEK_CIRC_VALID, SEEK_END) < 0) { /// Invalid frame - reset to the latest acquired
+    char *char_buffer_ptr = (char *) buffer_ptr;
+    if (lseek(fd_circbuf, LSEEK_CIRC_VALID, SEEK_END) < 0) { /// Invalid frame - reset to the latest acquired
         this_pointer = lseek(fd_circbuf, LSEEK_CIRC_LAST, SEEK_END); /// Last acquired frame (may be not yet available if none are)
     }
     cur_pointer = this_pointer;
-    if(before == 0)
+    if (before == 0)
         lseek(fd_circbuf, LSEEK_CIRC_WAIT, SEEK_END);
-    while(before && (((p = lseek(fd_circbuf, LSEEK_CIRC_PREV, SEEK_END))) >= 0)) { /// try to get earlier valid frame
+    while (before && (((p = lseek(fd_circbuf, LSEEK_CIRC_PREV, SEEK_END))) >= 0)) { /// try to get earlier valid frame
         cur_pointer = p;
         before--;
     }
     /// if 'before' is still >0 - not enough frames acquired, wait for more
-    while(before > 0) {
+    while (before > 0) {
         lseek(fd_circbuf, this_pointer, SEEK_SET);
         lseek(fd_circbuf, LSEEK_CIRC_WAIT, SEEK_END);
         this_pointer = lseek(fd_circbuf, LSEEK_CIRC_NEXT, SEEK_END);
         before--;
     }
     long metadata_start = cur_pointer - 32;
-    if(metadata_start < 0)
+    if (metadata_start < 0)
         metadata_start += buffer_length;
     /// copy the interframe data (timestamps are not yet there)
-D(cerr << __FILE__<< ":"<< __FUNCTION__ << ":" <<__LINE__ << " before=" << before << " metadata_start=" << metadata_start << endl;)
+    D(sensor_port, cerr << " before=" << before << " metadata_start=" << metadata_start << endl);
     memcpy(frame_pars, &char_buffer_ptr[metadata_start], 32);
     long jpeg_len = frame_pars->frame_length; //! frame_pars->frame_length is now the length of bitstream
-    if(frame_pars->signffff != 0xffff) {
-        cerr << __FILE__<< ":"<< __FUNCTION__ << ":" <<__LINE__ << " Wrong signature in getFramePars() (broken frame), frame_pars->signffff="<< frame_pars->signffff << endl;
-        int i;
-        long * dd =(long *) frame_pars;
-        cerr << hex << (metadata_start/4) << ": ";
+    if (frame_pars->signffff != 0xffff) {
+        cerr << __FILE__ << ":" << __FUNCTION__ << ":" << __LINE__
+                << " Wrong signature in getFramePars() (broken frame), frame_pars->signffff="
+                << frame_pars->signffff << endl;
+        int i;
+        long * dd = (long *) frame_pars;
+        cerr << hex << (metadata_start / 4) << ": ";
 //        for (i=0;i<8;i++) {
-        for (i=0;i<8;i++) {
+        for (i = 0; i < 8; i++) {
             cerr << hex << dd[i] << " ";
         }
         cerr << dec << endl;
         return -1;
     } else {
 //        cerr << hex << (metadata_start/4) << dec << endl; ///************* debug
     }
     /// find location of the timestamp and copy it to the frame_pars structure
     ///==================================
     long timestamp_start = (cur_pointer) + ((jpeg_len + CCAM_MMAP_META + 3) & (~0x1f)) + 32 - CCAM_MMAP_META_SEC; //! magic shift - should index first byte of the time stamp
-    if(timestamp_start >= buffer_length)
+    if (timestamp_start >= buffer_length)
         timestamp_start -= buffer_length;
     memcpy(&(frame_pars->timestamp_sec), &char_buffer_ptr[timestamp_start], 8);
-    if(ptr_before == 0)
+    if (ptr_before == 0)
         lseek(fd_circbuf, this_pointer, SEEK_SET); /// restore the file pointer
 //    D(cerr << __FILE__<< ":"<< __FUNCTION__ << ":" <<__LINE__ << " this_pointer=" << this_pointer << " cur_pointer=" << cur_pointer << endl;)
     return cur_pointer;
@@ -332,23 +340,19 @@ struct video_desc_t Video::get_current_desc(bool with_fps) {
     struct video_desc_t video_desc;
     video_desc.valid = false;
     long ptr = -1;
-    if((ptr = getFramePars(&frame_pars, FRAMEPARS_BEFORE, -1)) < 0) {
+    if ((ptr = getFramePars(&frame_pars, FRAMEPARS_BEFORE, -1)) < 0) {
         return video_desc;
     } else {
-        if(with_fps) {
-            if(getFramePars(&prev_pars, FRAMEPARS_BEFORE + 1, ptr) < 0)
+        if (with_fps) {
+            if (getFramePars(&prev_pars, FRAMEPARS_BEFORE + 1, ptr) < 0)
                 return video_desc;
             double fps = (frame_pars.timestamp_sec - prev_pars.timestamp_sec);
             fps *= 1000000.0;
             fps += frame_pars.timestamp_usec;
             fps -= prev_pars.timestamp_usec;
-D3( float _f = fps;)
             fps = 1000000.0 / fps;
             video_desc.fps = (used_fps = fps);
-D( cerr << __FILE__<< ":"<< __FUNCTION__ << ":" <<__LINE__ << " fps=" << fps << endl;)
-// cerr << __FILE__<< ":"<< __FUNCTION__ << ":" <<__LINE__ << " fps=" << video_desc.fps << endl;
-D3(if(_f == 0))
-D3(cerr << "delta == " << _f << endl << endl;)
+            D(sensor_port, cerr << " fps=" << fps << endl);
         }
     }
     video_desc.valid = true;
@@ -361,7 +365,7 @@ D3(cerr << "delta == " << _f << endl << endl;)
 }
 
 void Video::fps(float fps) {
-    if(fps < 0.01)
+    if (fps < 0.01)
         return;
     /// currently limiting FPS only works with free running TODO: Add external trigger frequency support.
     unsigned long write_data[6];
@@ -369,12 +373,12 @@ void Video::fps(float fps) {
     write_data[0] = FRAMEPARS_SETFRAME;
     write_data[1] = target_frame; /// wait then for that frame to be available on the output plus 2 frames for fps to be stable
     write_data[2] = P_FP1000SLIM;
-    write_data[3] = (unsigned long)fps * 1000;
+    write_data[3] = (unsigned long) fps * 1000;
     write_data[4] = P_FPSFLAGS;
     write_data[5] = 3;
 //    long rslt = write(fd_fparmsall, write_data, sizeof(write_data));
     int rslt = params->write(write_data, sizeof(write_data));
-    if(rslt == sizeof(write_data)) { /// written OK
+    if (rslt == sizeof(write_data)) { /// written OK
 //        lseek(fd_fparmsall, LSEEK_FRAME_WAIT_ABS + target_frame + FRAMES_SKIP_FPS, SEEK_END); /// skip frames
         params->lseek(LSEEK_FRAME_WAIT_ABS + target_frame + FRAMES_SKIP_FPS, SEEK_END); /// skip frames
     }
@@ -392,43 +396,44 @@ long Video::capture(void) {
     int before;
     ///Make sure the streamer is not disabled through the bit in P_DAEMON_EN
 //    if((framePars[pars->getGPValue(G_THIS_FRAME) & PARS_FRAMES_MASK].pars[P_DAEMON_EN] & (1 << lastDaemonBit)) == 0) {
-    if((params->getFrameValue(P_DAEMON_EN) & (1 << lastDaemonBit)) == 0) {
+    if ((params->getFrameValue(P_DAEMON_EN) & (1 << lastDaemonBit)) == 0) {
        return -DAEMON_DISABLED; /// return exception (will stop the stream)
     }
     frameStartByteIndex = lseek(fd_circbuf, LSEEK_CIRC_TOWP, SEEK_END); /// byte index in circbuf of the frame start
     latestAvailableFrame_ptr = frameStartByteIndex;
     lseek(fd_circbuf, LSEEK_CIRC_WAIT, SEEK_END);
-    frame_ptr = (char *)((unsigned long)buffer_ptr + latestAvailableFrame_ptr);
+    frame_ptr = (char *) ((unsigned long) buffer_ptr + latestAvailableFrame_ptr);
 //    fprintf(stderr, "frame_ptr == %08X; ", frame_ptr);
-    if(latestAvailableFrame_ptr < 32)
+    if (latestAvailableFrame_ptr < 32)
         latestAvailableFrame_ptr += buffer_length;
     latestAvailableFrame_ptr >>= 2;
     frame_len = buffer_ptr[latestAvailableFrame_ptr - 1];
     // read timestamp
-    char *ts_ptr = (char *)((unsigned long)frame_ptr + (long)(((frame_len + CCAM_MMAP_META + 3) & (~0x1f)) + 32 - CCAM_MMAP_META_SEC));
+    char *ts_ptr = (char *) ((unsigned long) frame_ptr + (long) (((frame_len + CCAM_MMAP_META + 3) & (~0x1f)) + 32 - CCAM_MMAP_META_SEC));
     unsigned long t[2];
-    memcpy(&t, (void *)ts_ptr, 8);
+    memcpy(&t, (void *) ts_ptr, 8);
     f_tv.tv_sec = t[0];
     f_tv.tv_usec = t[1];
     // read Q value
-    char *meta = (char *)frame_ptr;
+    char *meta = (char *) frame_ptr;
     meta -= 32;
-    if(meta < (char *)buffer_ptr)
+    if (meta < (char *) buffer_ptr)
         meta += buffer_length;
-    struct interframe_params_t *fp = (struct interframe_params_t *)meta;
+    struct interframe_params_t *fp = (struct interframe_params_t *) meta;
     /// See if the frame parameters are the same as used when starting the stream,
     /// Otherwise check for up to G_SKIP_DIFF_FRAME older frames and return them instead,
     /// If that number is exceeded - return exception
     /// Each time the latest acquired frame is considered, so we do not need to save frmae poointer additionally
-    if((fp->width != used_width) || (fp->height != used_height)) {
-        for(before = 1; before <= (int)params->getGPValue(G_SKIP_DIFF_FRAME); before++) {
-            if(((frameStartByteIndex = getFramePars(&frame_pars, before))) && (frame_pars.width == used_width) && (frame_pars.height == used_height)) {
+    if ((fp->width != used_width) || (fp->height != used_height)) {
+        for (before = 1; before <= (int) params->getGPValue(G_SKIP_DIFF_FRAME); before++) {
+            if (((frameStartByteIndex = getFramePars(&frame_pars, before)))
+                    && (frame_pars.width == used_width) && (frame_pars.height == used_height)) {
                 /// substitute older frame instead of the latest one. Leave wrong timestamp?
                 /// copying code above (may need some cleanup). Maybe - just move earlier so there will be no code duplication?
                 latestAvailableFrame_ptr = frameStartByteIndex;
-                frame_ptr = (char *)((unsigned long)buffer_ptr + latestAvailableFrame_ptr);
-                if(latestAvailableFrame_ptr < 32)
+                frame_ptr = (char *) ((unsigned long) buffer_ptr + latestAvailableFrame_ptr);
+                if (latestAvailableFrame_ptr < 32)
                     latestAvailableFrame_ptr += buffer_length;
                 latestAvailableFrame_ptr >>= 2;
                 frame_len = buffer_ptr[latestAvailableFrame_ptr - 1];
@@ -443,40 +448,40 @@ long Video::capture(void) {
 //cerr << "used_width == " << used_width << "; current_width == " << frame_pars.width << endl;
                 /// update interframe data pointer
 //                char *meta = (char *)frame_ptr;
-                meta = (char *)frame_ptr;
+                meta = (char *) frame_ptr;
                 meta -= 32;
-                if(meta < (char *)buffer_ptr)
+                if (meta < (char *) buffer_ptr)
                     meta += buffer_length;
-                fp = (struct interframe_params_t *)meta;
+                fp = (struct interframe_params_t *) meta;
                 break;
             }
         }
-        if(before > (int) params->getGPValue(G_SKIP_DIFF_FRAME)) {
-D(cerr << __FILE__<< ":"<< __FUNCTION__ << ":" <<__LINE__<< " Killing stream because of frame size change " << endl;)
+        if (before > (int) params->getGPValue(G_SKIP_DIFF_FRAME)) {
+            D(sensor_port, cerr << " Killing stream because of frame size change " << endl);
             return -SIZE_CHANGE; /// It seems that frame size is changed for good, need to restart the stream
         }
-D(cerr << __FILE__<< ":"<< __FUNCTION__ << ":" <<__LINE__<< " Waiting for the original frame size to be restored , using " << before << " frames ago" << endl;)
+        D(sensor_port, cerr << " Waiting for the original frame size to be restored , using " << before << " frames ago" << endl);
     }
 ///long Video::getFramePars(struct interframe_params_t * frame_pars, long before) {
 ///getGPValue(unsigned long GPNumber)
     quality = fp->quality2;
-    if(qtables_include && quality != f_quality) {
-D(cerr << __FILE__<< ":"<< __FUNCTION__ << ":" <<__LINE__<< " Updating quality tables, new quality is " << quality << endl;)
+    if (qtables_include && quality != f_quality) {
+        D(sensor_port, cerr << " Updating quality tables, new quality is " << quality << endl);
         lseek(fd_jpeghead, frameStartByteIndex | 2, SEEK_END); /// '||2' indicates that we need just quantization tables, not full JPEG header
-        read(fd_jpeghead, (void *)&qtable[0], 128);
+        read(fd_jpeghead, (void *) &qtable[0], 128);
     }
     f_quality = quality;
 /*
     // check statistic
     static bool first = true;
     if(first) {
         struct timeval tv;
         gettimeofday(&tv, NULL);
         first = false;
         fprintf(stderr, "VIDEO first with time: %d:%06d at: %d:%06d\n", f_tv.tv_sec, f_tv.tv_usec, tv.tv_sec, tv.tv_usec);
     }
*/
     return frame_len;
 }
 
 long Video::process(void) {
@@ -485,32 +490,32 @@ long Video::process(void) {
     int to_send = _plen;
     int _qtables_len = 128 + 4;
     long frame_len = capture();
-    if(frame_len == 0) {
+    if (frame_len == 0) {
 //D(cerr << "[";)
 //        return false;
         return 0; /// now never here
     } else {
-        if(frame_len < 0) {
-D(cerr << __FILE__<< ":"<< __FUNCTION__ << ":" <<__LINE__<< "capture returned negative" << frame_len << endl;)
+        if (frame_len < 0) {
+            D(sensor_port, cerr << "capture returned negative" << frame_len << endl);
 //            return false;
             return frame_len; /// attention (restart) is needed
         }
     }
     // check FPS decimation
     bool to_skip = true;
-    if(fps_scale_c == 0)
+    if (fps_scale_c == 0)
         to_skip = false;
     fps_scale_c++;
-    if(fps_scale_c >= fps_scale)
+    if (fps_scale_c >= fps_scale)
         fps_scale_c = 0;
 //cerr << "fps_scale == " << fps_scale << "; fps_scale_c == " << fps_scale_c << "; to_skip == " << to_skip << endl;
-    if(to_skip)
+    if (to_skip)
         return 1;
     int to_send_len = frame_len;
     unsigned char h[20 + 4];
     int packet_len = 0;
-    unsigned char *data = (unsigned char *)frame_ptr;
+    unsigned char *data = (unsigned char *) frame_ptr;
     uint64_t t = f_tv.tv_sec;
     t *= 90000;
@@ -519,22 +524,22 @@ D(cerr << __FILE__<< ":"<< __FUNCTION__ << ":" <<__LINE__<< "capture returned ne
     double f = f_tv.tv_usec;
     f /= 1000000.0;
     f *= 90000.0;
-    timestamp += (uint32_t)f;
+    timestamp += (uint32_t) f;
     uint32_t ts;
     ts = timestamp;
     ts = htonl(ts);
     long offset = 0;
     void *v_ptr[4];
-    int v_len[4] = {0, 0, 0, 0};
+    int v_len[4] = { 0, 0, 0, 0 };
     bool first = true;
-    while(to_send_len && _play) {
+    while (to_send_len && _play) {
         unsigned long pnum = htons(packet_num);
         bool last = false;
         to_send = _plen;
-        if(qtables_include && first)
+        if (qtables_include && first)
             to_send = _plen - _qtables_len;
-        if(to_send_len <= to_send) {
+        if (to_send_len <= to_send) {
             packet_len = to_send_len;
             to_send_len = 0;
             last = true;
@@ -544,41 +549,41 @@ D(cerr << __FILE__<< ":"<< __FUNCTION__ << ":" <<__LINE__<< "capture returned ne
         }
         // make RTP packet
         h[0] = 0x80;
-        if(!last)
+        if (!last)
             h[1] = _ptype;
         else
             h[1] = 0x80 + _ptype;
-        memcpy((void *)&h[2], (void *)&pnum, 2);
-        memcpy((void *)&h[4], (void *)&ts, 4);
-        memcpy((void *)&h[8], (void *)&SSRC, 4);
+        memcpy((void *) &h[2], (void *) &pnum, 2);
+        memcpy((void *) &h[4], (void *) &ts, 4);
+        memcpy((void *) &h[8], (void *) &SSRC, 4);
         // make MJPEG header
         unsigned long off = htonl(offset);
-        memcpy((void *)&h[12], (void *)&off, 4);
+        memcpy((void *) &h[12], (void *) &off, 4);
         h[12] = 0x00;
         h[16] = 0x01;
         unsigned int q = f_quality;
-        if(qtables_include)
+        if (qtables_include)
             q += 128;
-        h[17] = (unsigned char)(q & 0xFF);
-        if(f_width <= 2040)
+        h[17] = (unsigned char) (q & 0xFF);
+        if (f_width <= 2040)
             h[18] = (f_width / 8) & 0xFF;
         else
             h[18] = 0;
-        if(f_height <= 2040)
+        if (f_height <= 2040)
             h[19] = (f_height / 8) & 0xFF;
         else
            h[19] = 0;
         h[20] = 0;
         h[21] = 0;
         unsigned short l = htons(128);
-        memcpy((void *)&h[22], (void *)&l, 2);
+        memcpy((void *) &h[22], (void *) &l, 2);
         // update RTCP statistic
         rtp_packets++;
         rtp_octets += packet_len + 8; // data + MJPEG header
         // send vector
-        if(first) {
+        if (first) {
             v_ptr[0] = h;
-            if(qtables_include) {
+            if (qtables_include) {
                 v_len[0] = 24;
                 v_ptr[1] = qtable;
                 v_len[1] = 128;
......
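For reference, the h[] byte assignments in Video::process() above line up with the 12-byte RTP fixed header (RFC 3550) followed by the 8-byte RTP/JPEG payload header and, when in-band quantization tables are sent, the 4-byte quantization table header of RFC 2435. A commented sketch of that 24-byte layout; the helper name and parameters are illustrative and not part of the streamer:

    #include <arpa/inet.h>
    #include <cstdint>
    #include <cstring>

    // Fills the 24-byte header used for the first fragment when quantization
    // tables are carried in-band (Q >= 128): RTP fixed header + JPEG header +
    // quantization table header.
    static void build_mjpeg_rtp_header(uint8_t h[24], bool last, uint8_t ptype,
                                       uint16_t seq, uint32_t ts90k, uint32_t ssrc,
                                       uint32_t frag_offset, uint8_t q,
                                       uint16_t width, uint16_t height) {
        h[0] = 0x80;                                   // RTP version 2, no padding/extension/CSRC
        h[1] = last ? (0x80 | ptype) : ptype;          // marker bit on the last fragment of a frame
        uint16_t nseq = htons(seq);         std::memcpy(&h[2], &nseq, 2);   // sequence number
        uint32_t nts = htonl(ts90k);        std::memcpy(&h[4], &nts, 4);    // 90 kHz timestamp
        uint32_t nssrc = htonl(ssrc);       std::memcpy(&h[8], &nssrc, 4);  // SSRC
        uint32_t noff = htonl(frag_offset); std::memcpy(&h[12], &noff, 4);  // fragment offset
        h[12] = 0x00;                                  // type-specific byte; offset keeps its low 3 bytes
        h[16] = 0x01;                                  // JPEG "type" field, 1 as in the code above
        h[17] = q;                                     // Q; values >= 128 signal in-band tables
        h[18] = (width <= 2040) ? (uint8_t)(width / 8) : 0;    // width/8, or 0 if out of range
        h[19] = (height <= 2040) ? (uint8_t)(height / 8) : 0;  // height/8, or 0 if out of range
        h[20] = 0;                                     // quantization table header: MBZ
        h[21] = 0;                                     // precision (8-bit table entries)
        uint16_t nlen = htons(128);         std::memcpy(&h[22], &nlen, 2);  // table length in bytes
    }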