view src/mediastreamer/videostream.c @ 12116:e75ef7aa913e

[gaim-migrate @ 14416] " This patch implements a replacement for the queuing system from 1.x. It also obsoletes a previous patch [#1338873] I submitted to prioritize the unseen states in gtk conversations. The attached envelope.png is ripped from the msgunread.png already included in gaim. It should be dropped in the pixmaps directory (Makefile.am is updated accordingly in this patch). The two separate queuing preferences from 1.x, queuing messages while away and queuing all new messages (from docklet), are replaced with a single 3-way preference for conversations. The new preference is "Hide new IM conversations". This preference can be set to never, away and always. When a gtk conversation is created, it may be placed in a hidden conversation window instead of being placed normally. This decision is based upon the preference and possibly the away state of the account the conversation is being created for. This *will* affect conversations the user explicitly requests to be created, so in these cases the caller must be sure to present the conversation to the user, using gaim_gtkconv_present_conversation(). This is done already in gtkdialogs.c which handles creating conversations requested by the user from gaim proper (menus, double-clicking on buddy in blist, etc.). The main advantage to not queuing messages is that the conversations exist, the message is written to the conversation (and logged if appropriate) and the unseen state is set on the conversation. This means no additional features are needed to track whether there are queued messages or not, just use the unseen state on conversations. Since conversations may not be visible (messages "queued"), gaim proper needs some notification that there are messages waiting. I opted for a menutray icon that shows up when an im conversation has an unseen message. Clicking this icon will focus (and show if hidden) the first conversation with an unseen message. 
This is essentially the same behavior of the docklet in cvs right now, except that the icon is only visible when there is a conversation with an unread message. The api that is added is flexible enough to allow either the docklet or the new blist menutray icon to be visible for conversations of any/all types and for unseen messages >= any state. Currently they are set to only IM conversations and only unseen states >= TEXT (system messages and no log messages will not trigger blinking the docklet or showing the blist tray icon), but these could be made preferences relatively easily in the future. Other plugins could probably benefit as well: gaim_gtk_conversations_get_first_unseen(). There is probably some limit to comment size, so I'll stop rambling now. If anyone has more questions/comments, catch me in #gaim, here or on gaim-devel." committer: Tailor Script <tailor@pidgin.im>
author Luke Schierer <lschiere@pidgin.im>
date Wed, 16 Nov 2005 18:17:01 +0000
parents e67993da8a22
children
line wrap: on
line source

/*
  The mediastreamer library aims at providing modular media processing and I/O
        for linphone, but also for any telephony application.
  Copyright (C) 2001  Simon MORLAT simon.morlat@linphone.org
                                                                                
  This library is free software; you can redistribute it and/or
  modify it under the terms of the GNU Lesser General Public
  License as published by the Free Software Foundation; either
  version 2.1 of the License, or (at your option) any later version.

  This library is distributed in the hope that it will be useful,
  but WITHOUT ANY WARRANTY; without even the implied warranty of
  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
  Lesser General Public License for more details.

  You should have received a copy of the GNU Lesser General Public
  License along with this library; if not, write to the Free Software
  Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
*/


#include "mediastream.h"
#include "msvideosource.h"
#include "msavdecoder.h"
#include "msavencoder.h"
#include "msnosync.h"
#include "mssdlout.h"

#define USE_SDL

extern void create_duplex_rtpsession(RtpProfile *profile, int locport,char *remip,int remport,
				int payload,int jitt_comp,
			RtpSession **recvsend);

#define MAX_RTP_SIZE	5000

/* this code is not part of the library itself, it is part of the mediastream program */
/*
 * Frees a VideoStream and every resource it owns: the RTP send/recv
 * filters and their sessions, the source, output, decoder and encoder
 * filters, and the synchronisation timer.  NULL members are skipped,
 * so a partially constructed stream can be freed safely.
 */
void
video_stream_free (VideoStream * stream)
{
	/* bug fix: both pointers must be initialized — the original read
	 * `recvs` uninitialized in the `sends != recvs` test below whenever
	 * stream->rtprecv was NULL, which is undefined behavior. */
	RtpSession *recvs = NULL, *sends = NULL;
	if (stream->rtprecv != NULL)
	{
		recvs = ms_rtp_recv_get_session (MS_RTP_RECV (stream->rtprecv));
		if (recvs != NULL)
		{
			rtp_session_destroy (recvs);
		}
		ms_filter_destroy (stream->rtprecv);
	}
	if (stream->rtpsend != NULL)
	{
		sends = ms_rtp_send_get_session (MS_RTP_SEND (stream->rtpsend));
		/* a duplex session may be shared by both directions; never
		 * destroy the same session twice */
		if (sends != NULL && sends != recvs)
		{
			rtp_session_destroy (sends);
		}
		ms_filter_destroy (stream->rtpsend);
	}
	if (stream->source != NULL)
		ms_filter_destroy (stream->source);
	if (stream->output != NULL)
		ms_filter_destroy (stream->output);
	if (stream->decoder != NULL)
		ms_filter_destroy (stream->decoder);
	if (stream->encoder != NULL)
		ms_filter_destroy (stream->encoder);
	if (stream->timer != NULL)
		ms_sync_destroy (stream->timer);
	g_free (stream);
}


/*
 * Creates and starts a full duplex video stream:
 *   capture source -> encoder -> RTP sender, and
 *   RTP receiver -> decoder -> display output.
 *
 * profile/locport/remip/remport/payload/jitt_comp configure a single
 * duplex RTP session used for both directions.  When show_local is TRUE
 * the encoder output is additionally linked to the local display.
 * `source` names the capture filter, `device` the device it opens.
 *
 * Returns the running stream, or NULL on failure.  NOTE(review):
 * g_error() aborts by default in glib, so the NULL returns below are
 * only reached when a non-fatal log handler is installed.
 */
VideoStream *
video_stream_start (RtpProfile *profile, int locport, char *remip, int remport,
			 int payload, int jitt_comp, gboolean show_local, 
			 const gchar *source, const gchar *device)
{
	VideoStream *stream = g_new0 (VideoStream, 1);
	RtpSession *rtps, *rtpr;
	PayloadType *pt;
	gchar *format;
	gint width = VIDEO_SIZE_CIF_W;
	gint height = VIDEO_SIZE_CIF_H;
	gfloat fps;

	create_duplex_rtpsession(profile,locport,remip,remport,payload,jitt_comp,&rtpr);
	rtp_session_enable_adaptive_jitter_compensation(rtpr,FALSE);
	rtps=rtpr;	/* one duplex session serves both directions */
	/* bug fix: the original referenced an undeclared identifier `remip4` */
	ms_trace("sending video to %s:%i", remip, remport);
	
	/* creates two rtp filters to recv send streams (remote part) */
	rtp_session_max_buf_size_set(rtpr,MAX_RTP_SIZE);
	stream->rtpsend = ms_rtp_send_new ();
	if (remport>0) ms_rtp_send_set_session (MS_RTP_SEND (stream->rtpsend), rtps);

	
	stream->rtprecv = ms_rtp_recv_new ();
	ms_rtp_recv_set_session (MS_RTP_RECV (stream->rtprecv), rtpr);

	pt=rtp_profile_get_payload(profile,payload);
	if (pt==NULL){
		/* bug fix: free what was built so far instead of leaking it
		 * (mirrors the codec-failure path below) */
		video_stream_free(stream);
		g_error("videostream.c: undefined payload type.");
		return NULL;
	}
	ms_trace("videostream.c: getting codecs for %s", pt->mime_type);

	/* creates the filters */
	stream->source = ms_filter_new_with_name(source);
	if (stream->source == NULL){
		video_stream_free(stream);	/* bug fix: do not leak on error */
		g_error("videostream.c: failed to create video source %s.", source);
		return NULL;
	}
	
#ifdef USE_SDL
	stream->output=ms_sdl_out_new();
#else
	stream->output = MS_FILTER(ms_video_output_new ());
#endif
	stream->encoder=ms_encoder_new_with_string_id(pt->mime_type);
	/* bug fix: printing a pointer with %x is undefined behavior; %p is
	 * the correct conversion for pointers */
	g_message("Video encoder created: %p",stream->encoder);
	stream->decoder=ms_decoder_new_with_string_id(pt->mime_type);
	if ((stream->encoder==NULL) || (stream->decoder==NULL)){
		/* big problem: we have not a registered codec for this payload...*/
		video_stream_free(stream);
		g_error("videostream.c: No codecs available for payload %i.",payload);
		return NULL;
	}

	/* configure the filters */
	ms_video_source_set_device(MS_VIDEO_SOURCE(stream->source), device);
	ms_video_source_set_size(MS_VIDEO_SOURCE(stream->source), width, height);
	ms_video_source_set_frame_rate(MS_VIDEO_SOURCE(stream->source), 8, 1);
	/* bug fix: cast before dividing so fractional frame rates are not
	 * truncated by integer division (8/1 used above happens to be exact,
	 * but other rate/base pairs would not be) */
	fps = (gfloat) MS_VIDEO_SOURCE(stream->source)->frame_rate /
			(gfloat) MS_VIDEO_SOURCE(stream->source)->frame_rate_base;
	format = ms_video_source_get_format(MS_VIDEO_SOURCE(stream->source));
	
	ms_AVencoder_set_format (MS_AVENCODER (stream->encoder), format);
	ms_AVencoder_set_width(MS_AVENCODER(stream->encoder), width);
	ms_AVencoder_set_height(MS_AVENCODER(stream->encoder), height);
	/* bitrate is based upon 30fps? adjust by our possibly lower framerate */
	ms_AVencoder_set_bit_rate(MS_AVENCODER(stream->encoder), pt->normal_bitrate * 30 / fps );
	ms_AVdecoder_set_format (MS_AVDECODER (stream->decoder), "YUV420P");
	ms_AVdecoder_set_width(MS_AVDECODER(stream->decoder), width);
	ms_AVdecoder_set_height(MS_AVDECODER(stream->decoder), height);
#ifdef USE_SDL
	/* we suppose our decoder and pin1 of encoder always outputs YUV420P */
	ms_sdl_out_set_format(MS_SDL_OUT(stream->output),"YUV420P");
#else
	ms_video_output_set_size (MS_VIDEO_OUTPUT (stream->output), width, height);
#endif

	/* and then connect all */
	ms_filter_add_link (stream->source, stream->encoder);
	ms_filter_add_link (stream->encoder, stream->rtpsend);
	
	ms_filter_add_link (stream->rtprecv, stream->decoder);
	ms_filter_add_link (stream->decoder, stream->output);
	if (show_local)
		ms_filter_add_link(stream->encoder,stream->output);

	/* create the synchronisation source */
	stream->timer = ms_timer_new(); 
	ms_sync_attach (stream->timer, stream->source);
	ms_sync_attach (stream->timer, stream->rtprecv);

	/* and start */
	ms_video_source_start(MS_VIDEO_SOURCE(stream->source));
	ms_start (stream->timer);
	stream->show_local=show_local;
	return stream;
}



/*
 * Stops a running video stream and releases it.  Tear-down is the
 * mirror image of video_stream_start(): stop processing, detach from
 * the synchronisation source, remove every filter link that was added,
 * then free the whole stream.
 */
void
video_stream_stop (VideoStream * stream)
{
	/* halt the timer and the capture device before touching the graph */
	ms_stop (stream->timer);
	ms_video_source_stop (MS_VIDEO_SOURCE (stream->source));

	/* detach both branches from the synchronisation source */
	ms_sync_detach (stream->timer, stream->source);
	ms_sync_detach (stream->timer, stream->rtprecv);

	/* undo the links made in video_stream_start() */
	ms_filter_remove_links (stream->source, stream->encoder);
	ms_filter_remove_links (stream->encoder, stream->rtpsend);
	ms_filter_remove_links (stream->rtprecv, stream->decoder);
	ms_filter_remove_links (stream->decoder, stream->output);
	if (stream->show_local) {
		ms_filter_remove_links (stream->encoder, stream->output);
	}

	video_stream_free (stream);
}


/*
 * Attaches RTCP source description (SDES) information to the stream's
 * sending RTP session, if one exists.  `cname` is the canonical name
 * reported to the remote party; the remaining SDES fields are left
 * unset except for the tool name and a note string.
 */
void video_stream_set_rtcp_information(VideoStream *st, const char *cname){
	if (st->send_session==NULL)
		return;
	rtp_session_set_source_description(st->send_session,cname,NULL,NULL,NULL,NULL,"linphone-" LINPHONE_VERSION,
										"This is free software (GPL) !");
}



VideoStream * video_preview_start(const gchar *source, const gchar *device){
	VideoStream *stream = g_new0 (VideoStream, 1);
	gchar *format;
	gint width = VIDEO_SIZE_CIF_W;
	gint height = VIDEO_SIZE_CIF_H;

	/* creates the filters */
	stream->source = ms_filter_new_with_name(source);
	if (stream->source == NULL){
		g_error("videostream.c: failed to create video source %s.", source);
		return NULL;
	}
#ifdef USE_SDL
	stream->output=ms_sdl_out_new();
#else
	stream->output = ms_video_output_new ();
#endif
	/* configure the filters */
	ms_video_source_set_device(MS_VIDEO_SOURCE(stream->source), device);
	ms_video_source_set_size(MS_VIDEO_SOURCE(stream->source), width, height);
	ms_video_source_set_frame_rate(MS_VIDEO_SOURCE(stream->source), 8, 1);
	format = ms_video_source_get_format(MS_VIDEO_SOURCE(stream->source));
	
#ifdef USE_SDL
	ms_sdl_out_set_format(MS_SDL_OUT(stream->output),format);
#else
	ms_video_output_set_format(MS_VIDEO_OUTPUT(stream->output),format);
	ms_video_output_set_size (MS_VIDEO_OUTPUT (stream->output), width, height);
	ms_video_output_set_title(MS_VIDEO_OUTPUT(stream->output),"Linphone Video");
#endif
	/* and then connect all */
	ms_filter_add_link (stream->source, stream->output);
	/* create the synchronisation source */
	stream->timer = ms_timer_new(); 
	ms_sync_attach (stream->timer, stream->source);

	/* and start */
	ms_video_source_start(MS_VIDEO_SOURCE(stream->source));
	ms_start (stream->timer);

	return stream;
}

/*
 * Stops a preview started with video_preview_start(): halts the timer
 * and the capture source, detaches the source from the timer, removes
 * the single source->output link, then releases the whole stream.
 */
void video_preview_stop(VideoStream *stream){
	/* stop processing before tearing the graph down */
	ms_stop (stream->timer);
	ms_video_source_stop (MS_VIDEO_SOURCE (stream->source));

	/* detach, unlink, and release everything */
	ms_sync_detach (stream->timer, stream->source);
	ms_filter_remove_links (stream->source, stream->output);
	video_stream_free (stream);
}