changeset 25671:12a16471f94e

Refactored PurpleMedia so that creating audio and video sessions is virtually identical: media operations are now keyed by a session id instead of being split into audio- and video-specific code paths. Audio, video, and combined audio/video sessions now work. Also added videotestsrc to the video plugin preference.
author Mike Ruprecht <maiku@soc.pidgin.im>
date Fri, 06 Jun 2008 07:43:03 +0000
parents 42e17cc5b6d2
children 9983353706b8
files finch/gntmedia.c finch/gntmedia.h libpurple/marshallers.list libpurple/media.c libpurple/media.h libpurple/protocols/jabber/google.c libpurple/protocols/jabber/jabber.c libpurple/protocols/jabber/jingle.c pidgin/gtkconv.c pidgin/gtkmedia.c pidgin/gtkmedia.h pidgin/gtkprefs.c
diffstat 12 files changed, 586 insertions(+), 513 deletions(-)
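A minimal usage sketch of the refactored per-session API, not part of the changeset itself: it only restates the calling pattern the commit message describes. The helper name setup_av_call, its remote_jid parameter, and the session ids "audio-session"/"video-session" are illustrative; the purple_media_* calls are the ones added or renamed in this diff, and "rawudp" follows the in-tree comment that it will be replaced by "nice" once the libnice transmitter is finished.

#include "media.h"

static void
setup_av_call(PurpleMedia *media, const gchar *remote_jid)
{
	GstElement *audiosrc = NULL, *audiosink = NULL;
	GstElement *sendlevel = NULL, *recvlevel = NULL;
	GstElement *videosrc = NULL, *videosink = NULL;

	/* Audio and video streams are created through the same call,
	 * differing only in the session id and the stream type. */
	purple_media_add_stream(media, "audio-session", remote_jid,
				PURPLE_MEDIA_AUDIO, "rawudp");
	purple_media_add_stream(media, "video-session", remote_jid,
				PURPLE_MEDIA_VIDEO, "rawudp");

	/* Build the default send/receive bins from media.c... */
	purple_media_audio_init_src(&audiosrc, &sendlevel);
	purple_media_audio_init_recv(&audiosink, &recvlevel);
	purple_media_video_init_src(&videosrc);
	purple_media_video_init_recv(&videosink);

	/* ...and attach each one by session id. */
	purple_media_set_src(media, "audio-session", audiosrc);
	purple_media_set_sink(media, "audio-session", audiosink);
	purple_media_set_src(media, "video-session", videosrc);
	purple_media_set_sink(media, "video-session", videosink);

	/* A single pipeline now serves every session on this PurpleMedia. */
	gst_element_set_state(purple_media_get_pipeline(media),
			      GST_STATE_PLAYING);
}

The finch and jingle hunks below follow the same pattern; the session id replaces what used to be the implicit audio/video split.
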
--- a/finch/gntmedia.c	Wed Jun 04 19:21:49 2008 +0000
+++ b/finch/gntmedia.c	Fri Jun 06 07:43:03 2008 +0000
@@ -126,13 +126,13 @@
 			"Send level",
 			"The GstElement of this media's send 'level'",
 			GST_TYPE_ELEMENT,
-			G_PARAM_CONSTRUCT_ONLY | G_PARAM_READWRITE));
+			G_PARAM_READWRITE));
 	g_object_class_install_property(gobject_class, PROP_RECV_LEVEL,
 			g_param_spec_object("recv-level",
 			"Receive level",
 			"The GstElement of this media's recv 'level'",
 			GST_TYPE_ELEMENT,
-			G_PARAM_CONSTRUCT_ONLY | G_PARAM_READWRITE));
+			G_PARAM_READWRITE));
 
 	finch_media_signals[MESSAGE] = g_signal_new("message", G_TYPE_FROM_CLASS(klass),
 					G_SIGNAL_RUN_LAST, 0, NULL, NULL,
@@ -217,7 +217,26 @@
 static void
 finch_media_ready_cb(PurpleMedia *media, FinchMedia *gntmedia)
 {
-	GstElement *element = purple_media_get_audio_pipeline(media);
+	GstElement *element = purple_media_get_pipeline(media);
+
+	GstElement *sendbin, *sendlevel;
+	GstElement *recvbin, *recvlevel;
+
+	GList *sessions = purple_media_get_session_names(media);
+
+	purple_media_audio_init_src(&sendbin, &sendlevel);
+	purple_media_audio_init_recv(&recvbin, &recvlevel);
+
+	for (; sessions; sessions = sessions->next) {
+		purple_media_set_src(media, sessions->data, sendbin);
+		purple_media_set_sink(media, sessions->data, recvbin);
+	}
+	g_list_free(sessions);
+
+	g_object_set(gntmedia, "send-level", sendlevel,
+		     "recv-level", recvlevel,
+		     NULL);
+
 	gst_bus_add_signal_watch(GST_BUS(gst_pipeline_get_bus(GST_PIPELINE(element))));
 	g_signal_connect(G_OBJECT(gst_pipeline_get_bus(GST_PIPELINE(element))), "message", G_CALLBACK(level_message_cb), gntmedia);
 }
@@ -377,12 +396,10 @@
 }
 
 GntWidget *
-finch_media_new(PurpleMedia *media, GstElement *sendlevel, GstElement *recvlevel)
+finch_media_new(PurpleMedia *media)
 {
 	return GNT_WIDGET(g_object_new(finch_media_get_type(),
 				"media", media,
-				"send-level", sendlevel,
-				"recv-level", recvlevel,
 				"vertical", FALSE,
 				"homogeneous", FALSE,
 				NULL));
@@ -399,22 +416,14 @@
 static void
 finch_new_media(PurpleMediaManager *manager, PurpleMedia *media, gpointer null)
 {
-	GstElement *sendbin, *sendlevel;
-	GstElement *recvbin, *recvlevel;
 	GntWidget *gntmedia;
 	PurpleConversation *conv;
 
-	purple_media_audio_init_src(&sendbin, &sendlevel);
-	purple_media_audio_init_recv(&recvbin, &recvlevel);
-
-	purple_media_set_audio_src(media, sendbin);
-	purple_media_set_audio_sink(media, recvbin);
-
 	conv = purple_conversation_new(PURPLE_CONV_TYPE_IM,
 			purple_connection_get_account(purple_media_get_connection(media)),
 			purple_media_get_screenname(media));
 
-	gntmedia = finch_media_new(media, sendlevel, recvlevel);
+	gntmedia = finch_media_new(media);
 	g_signal_connect(G_OBJECT(gntmedia), "message", G_CALLBACK(gntmedia_message_cb), conv);
 	FINCH_MEDIA(gntmedia)->priv->conv = conv;
 	finch_conversation_set_info_widget(conv, gntmedia);
--- a/finch/gntmedia.h	Wed Jun 04 19:21:49 2008 +0000
+++ b/finch/gntmedia.h	Fri Jun 06 07:43:03 2008 +0000
@@ -63,7 +63,7 @@
 
 GType finch_media_get_type(void);
 
-GntWidget *finch_media_new(PurpleMedia *media, GstElement *send_level, GstElement *recv_level);
+GntWidget *finch_media_new(PurpleMedia *media);
 
 void finch_media_manager_init(void);
 
--- a/libpurple/marshallers.list	Wed Jun 04 19:21:49 2008 +0000
+++ b/libpurple/marshallers.list	Fri Jun 06 07:43:03 2008 +0000
@@ -1,1 +1,2 @@
 VOID:BOXED,BOXED
+VOID:POINTER,POINTER,OBJECT
--- a/libpurple/media.c	Wed Jun 04 19:21:49 2008 +0000
+++ b/libpurple/media.c	Fri Jun 06 07:43:03 2008 +0000
@@ -38,34 +38,31 @@
 #include <gst/interfaces/propertyprobe.h>
 #include <gst/farsight/fs-conference-iface.h>
 
+struct _PurpleMediaSession
+{
+	gchar *id;
+	PurpleMedia *media;
+	GstElement *src;
+	GstElement *sink;
+	FsSession *session;
+	GHashTable *streams;		/* FsStream list map to participant's name */
+	FsMediaType type;
+	GHashTable *local_candidates;	/* map to participant's name? */
+	FsCandidate *local_candidate;
+	FsCandidate *remote_candidate;
+};
+
 struct _PurpleMediaPrivate
 {
 	FsConference *conference;
 
 	char *name;
 	PurpleConnection *connection;
-	GstElement *audio_src;
-	GstElement *audio_sink;
-	GstElement *video_src;
-	GstElement *video_sink;
-
-	FsSession *audio_session;
-	FsSession *video_session;
 
-	GList *participants; 	/* FsParticipant list */
-	GList *audio_streams;	/* FsStream list */
-	GList *video_streams;	/* FsStream list */
+	GHashTable *sessions;	/* PurpleMediaSession table */
+	GHashTable *participants; /* FsParticipant table */
 
-	/* might be able to just combine these two */
-	GstElement *audio_pipeline;
-	GstElement *video_pipeline;
-
-	/* this will need to be stored/handled per stream
-	 * once having multiple streams is supported */
-	GList *local_candidates;
-
-	FsCandidate *local_candidate;
-	FsCandidate *remote_candidate;
+	GstElement *pipeline;
 };
 
 #define PURPLE_MEDIA_GET_PRIVATE(obj) (G_TYPE_INSTANCE_GET_PRIVATE((obj), PURPLE_TYPE_MEDIA, PurpleMediaPrivate))
@@ -88,6 +85,7 @@
 	REJECT,
 	GOT_HANGUP,
 	GOT_ACCEPT,
+	NEW_CANDIDATE,
 	CANDIDATES_PREPARED,
 	CANDIDATE_PAIR,
 	LAST_SIGNAL
@@ -99,12 +97,6 @@
 	PROP_FS_CONFERENCE,
 	PROP_NAME,
 	PROP_CONNECTION,
-	PROP_AUDIO_SRC,
-	PROP_AUDIO_SINK,
-	PROP_VIDEO_SRC,
-	PROP_VIDEO_SINK,
-	PROP_VIDEO_SESSION,
-	PROP_AUDIO_SESSION
 };
 
 GType
@@ -160,48 +152,6 @@
 			"The PurpleConnection associated with this session",
 			G_PARAM_CONSTRUCT_ONLY | G_PARAM_READWRITE));
 
-	g_object_class_install_property(gobject_class, PROP_AUDIO_SRC,
-			g_param_spec_object("audio-src",
-			"Audio source",
-			"The GstElement used to source audio",
-			GST_TYPE_ELEMENT,
-			G_PARAM_READWRITE));
-
-	g_object_class_install_property(gobject_class, PROP_AUDIO_SINK,
-			g_param_spec_object("audio-sink",
-			"Audio sink",
-			"The GstElement used to sink audio",
-			GST_TYPE_ELEMENT,
-			G_PARAM_READWRITE));
-
-	g_object_class_install_property(gobject_class, PROP_VIDEO_SRC,
-			g_param_spec_object("video-src",
-			"Video source",
-			"The GstElement used to source video",
-			GST_TYPE_ELEMENT,
-			G_PARAM_READWRITE));
-
-	g_object_class_install_property(gobject_class, PROP_VIDEO_SINK,
-			g_param_spec_object("video-sink",
-			"Audio source",
-			"The GstElement used to sink video",
-			GST_TYPE_ELEMENT,
-			G_PARAM_READWRITE));
-
-	g_object_class_install_property(gobject_class, PROP_VIDEO_SESSION,
-			g_param_spec_object("video-session",
-			"Video stream",
-			"The FarsightStream used for video",
-			FS_TYPE_SESSION,
-			G_PARAM_READWRITE));
-
-	g_object_class_install_property(gobject_class, PROP_AUDIO_SESSION,
-			g_param_spec_object("audio-session",
-			"Audio stream",
-			"The FarsightStream used for audio",
-			FS_TYPE_SESSION,
-			G_PARAM_READWRITE));
-
 	purple_media_signals[READY] = g_signal_new("ready", G_TYPE_FROM_CLASS(klass),
 				 	 G_SIGNAL_RUN_LAST, 0, NULL, NULL,
 					 g_cclosure_marshal_VOID__VOID,
@@ -230,6 +180,11 @@
 					 G_SIGNAL_RUN_LAST, 0, NULL, NULL,
 					 g_cclosure_marshal_VOID__VOID,
 					 G_TYPE_NONE, 0);
+	purple_media_signals[NEW_CANDIDATE] = g_signal_new("new-candidate", G_TYPE_FROM_CLASS(klass),
+					 G_SIGNAL_RUN_LAST, 0, NULL, NULL,
+					 purple_smarshal_VOID__POINTER_POINTER_OBJECT,
+					 G_TYPE_NONE, 3, G_TYPE_POINTER,
+					 G_TYPE_POINTER, FS_TYPE_CANDIDATE);
 	purple_media_signals[CANDIDATES_PREPARED] = g_signal_new("candidates-prepared", G_TYPE_FROM_CLASS(klass),
 					 G_SIGNAL_RUN_LAST, 0, NULL, NULL,
 					 g_cclosure_marshal_VOID__VOID,
@@ -254,58 +209,14 @@
 purple_media_finalize (GObject *media)
 {
 	PurpleMediaPrivate *priv = PURPLE_MEDIA_GET_PRIVATE(media);
-	GList *iter;
 	purple_debug_info("media","purple_media_finalize\n");
 
 	g_free(priv->name);
 
-	if (priv->audio_pipeline) {
-		gst_element_set_state(priv->audio_pipeline, GST_STATE_NULL);
-		gst_object_unref(priv->audio_pipeline);
-	}
-	if (priv->video_pipeline) {
-		gst_element_set_state(priv->video_pipeline, GST_STATE_NULL);
-		gst_object_unref(priv->video_pipeline);
-	}
-
-	if (priv->audio_src)
-		gst_object_unref(priv->audio_src);
-	if (priv->audio_sink)
-		gst_object_unref(priv->audio_sink);
-	if (priv->video_src)
-		gst_object_unref(priv->video_src);
-	if (priv->video_sink)
-		gst_object_unref(priv->video_sink);
-
-	for (iter = priv->audio_streams; iter; iter = g_list_next(iter)) {
-		g_object_unref(iter->data);
+	if (priv->pipeline) {
+		gst_element_set_state(priv->pipeline, GST_STATE_NULL);
+		gst_object_unref(priv->pipeline);
 	}
-	g_list_free(priv->audio_streams);
-
-	for (iter = priv->video_streams; iter; iter = g_list_next(iter)) {
-		g_object_unref(iter->data);
-	}
-	g_list_free(priv->video_streams);
-
-	if (priv->audio_session)
-		g_object_unref(priv->audio_session);
-	if (priv->video_session)
-		g_object_unref(priv->video_session);
-
-	for (iter = priv->participants; iter; iter = g_list_next(iter)) {
-		g_object_unref(iter->data);
-	}
-	g_list_free(priv->participants);
-
-	for (iter = priv->local_candidates; iter; iter = g_list_next(iter)) {
-		g_free(iter->data);
-	}
-	g_list_free(priv->local_candidates);
-
-	if (priv->local_candidate)
-		g_free(priv->local_candidate);
-	if (priv->remote_candidate)
-		g_free(priv->remote_candidate);
 
 	gst_object_unref(priv->conference);
 
@@ -334,47 +245,6 @@
 		case PROP_CONNECTION:
 			media->priv->connection = g_value_get_pointer(value);
 			break;
-		case PROP_AUDIO_SRC:
-			if (media->priv->audio_src)
-				gst_object_unref(media->priv->audio_src);
-			media->priv->audio_src = g_value_get_object(value);
-			gst_object_ref(media->priv->audio_src);
-			gst_bin_add(GST_BIN(purple_media_get_audio_pipeline(media)),
-				    media->priv->audio_src);
-			break;
-		case PROP_AUDIO_SINK:
-			if (media->priv->audio_sink)
-				gst_object_unref(media->priv->audio_sink);
-			media->priv->audio_sink = g_value_get_object(value);
-			gst_object_ref(media->priv->audio_sink);
-			gst_bin_add(GST_BIN(purple_media_get_audio_pipeline(media)),
-				    media->priv->audio_sink);
-			break;
-		case PROP_VIDEO_SRC:
-			if (media->priv->video_src)
-				gst_object_unref(media->priv->video_src);
-			media->priv->video_src = g_value_get_object(value);
-			gst_object_ref(media->priv->video_src);
-			break;
-		case PROP_VIDEO_SINK:
-			if (media->priv->video_sink)
-				gst_object_unref(media->priv->video_sink);
-			media->priv->video_sink = g_value_get_object(value);
-			gst_object_ref(media->priv->video_sink);
-			break;
-		case PROP_VIDEO_SESSION:
-			if (media->priv->video_session)
-				g_object_unref(media->priv->video_session);
-			media->priv->video_session = g_value_get_object(value);
-			gst_object_ref(media->priv->video_session);
-			break;
-		case PROP_AUDIO_SESSION:
-			if (media->priv->audio_session)
-				g_object_unref(media->priv->audio_session);
-			media->priv->audio_session = g_value_get_object(value);
-			gst_object_ref(media->priv->audio_session);
-			break;
-
 		default:	
 			G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
 			break;
@@ -399,25 +269,6 @@
 		case PROP_CONNECTION:
 			g_value_set_pointer(value, media->priv->connection);
 			break;
-		case PROP_AUDIO_SRC:
-			g_value_set_object(value, media->priv->audio_src);
-			break;
-		case PROP_AUDIO_SINK:
-			g_value_set_object(value, media->priv->audio_sink);
-			break;
-		case PROP_VIDEO_SRC:
-			g_value_set_object(value, media->priv->video_src);
-			break;
-		case PROP_VIDEO_SINK:
-			g_value_set_object(value, media->priv->video_sink);
-			break;
-		case PROP_VIDEO_SESSION:
-			g_value_set_object(value, media->priv->video_session);
-			break;
-		case PROP_AUDIO_SESSION:
-			g_value_set_object(value, media->priv->audio_session);
-			break;
-
 		default:	
 			G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);	
 			break;
@@ -425,6 +276,97 @@
 
 }
 
+static PurpleMediaSession*
+purple_media_get_session(PurpleMedia *media, const gchar *sess_id)
+{
+	return (PurpleMediaSession*) (media->priv->sessions) ?
+			g_hash_table_lookup(media->priv->sessions, sess_id) : NULL;
+}
+
+static FsParticipant*
+purple_media_get_participant(PurpleMedia *media, const gchar *name)
+{
+	return (FsParticipant*) (media->priv->participants) ?
+			g_hash_table_lookup(media->priv->participants, name) : NULL;
+}
+
+static FsStream*
+purple_media_session_get_stream(PurpleMediaSession *session, const gchar *name)
+{
+	return (FsStream*) (session->streams) ?
+			g_hash_table_lookup(session->streams, name) : NULL;
+}
+
+static GList*
+purple_media_session_get_local_candidates(PurpleMediaSession *session, const gchar *name)
+{
+	return (GList*) (session->local_candidates) ?
+			g_hash_table_lookup(session->local_candidates, name) : NULL;
+}
+
+static void
+purple_media_add_session(PurpleMedia *media, PurpleMediaSession *session)
+{
+	if (!media->priv->sessions) {
+		purple_debug_info("media", "Creating hash table for sessions\n");
+		media->priv->sessions = g_hash_table_new(g_str_hash, g_str_equal);
+	}
+	g_hash_table_insert(media->priv->sessions, g_strdup(session->id), session);
+}
+
+static FsParticipant *
+purple_media_add_participant(PurpleMedia *media, const gchar *name)
+{
+	FsParticipant *participant = purple_media_get_participant(media, name);
+
+	if (participant)
+		return participant;
+
+	participant = fs_conference_new_participant(media->priv->conference, g_strdup(name), NULL);
+
+	if (!media->priv->participants) {
+		purple_debug_info("media", "Creating hash table for participants\n");
+		media->priv->participants = g_hash_table_new(g_str_hash, g_str_equal);
+	}
+
+	g_hash_table_insert(media->priv->participants, g_strdup(name), participant);
+
+	return participant;
+}
+
+static void
+purple_media_insert_stream(PurpleMediaSession *session, const gchar *name, FsStream *stream)
+{
+	if (!session->streams) {
+		purple_debug_info("media", "Creating hash table for streams\n");
+		session->streams = g_hash_table_new(g_str_hash, g_str_equal);
+	}
+
+	g_hash_table_insert(session->streams, g_strdup(name), stream);
+}
+
+static void
+purple_media_insert_local_candidate(PurpleMediaSession *session, const gchar *name,
+				     FsCandidate *candidate)
+{
+	GList *candidates = purple_media_session_get_local_candidates(session, name);
+
+	candidates = g_list_append(candidates, candidate);
+
+	if (!session->local_candidates) {
+		purple_debug_info("media", "Creating hash table for local candidates\n");
+		session->local_candidates = g_hash_table_new(g_str_hash, g_str_equal);
+	}
+
+	g_hash_table_insert(session->local_candidates, g_strdup(name), candidates);
+}
+
+GList *
+purple_media_get_session_names(PurpleMedia *media)
+{
+	return g_hash_table_get_keys(media->priv->sessions);
+}
+
 void 
 purple_media_get_elements(PurpleMedia *media, GstElement **audio_src, GstElement **audio_sink,
                                                   GstElement **video_src, GstElement **video_sink)
@@ -441,70 +383,57 @@
 }
 
 void 
-purple_media_set_audio_src(PurpleMedia *media, GstElement *audio_src)
+purple_media_set_src(PurpleMedia *media, const gchar *sess_id, GstElement *src)
 {
-	g_object_set(G_OBJECT(media), "audio-src", audio_src, NULL);
-}
+	PurpleMediaSession *session = purple_media_get_session(media, sess_id);
+	GstPad *sinkpad;
+	GstPad *srcpad;
+	
+	if (session->src)
+		gst_object_unref(session->src);
+	session->src = src;
+	gst_bin_add(GST_BIN(purple_media_get_pipeline(media)),
+		    session->src);
 
-void 
-purple_media_set_audio_sink(PurpleMedia *media, GstElement *audio_sink)
-{
-	g_object_set(G_OBJECT(media), "audio-sink", audio_sink, NULL);
+	g_object_get(session->session, "sink-pad", &sinkpad, NULL);
+	srcpad = gst_element_get_static_pad(src, "ghostsrc");
+	purple_debug_info("media", "connecting pad: %s\n", 
+			  gst_pad_link(srcpad, sinkpad) == GST_PAD_LINK_OK
+			  ? "success" : "failure");
 }
 
 void 
-purple_media_set_video_src(PurpleMedia *media, GstElement *video_src)
-{
-	g_object_set(G_OBJECT(media), "video-src", video_src, NULL);
-}
-
-void 
-purple_media_set_video_sink(PurpleMedia *media, GstElement *video_sink)
+purple_media_set_sink(PurpleMedia *media, const gchar *sess_id, GstElement *sink)
 {
-	g_object_set(G_OBJECT(media), "video-sink", video_sink, NULL);
-}
-
-GstElement *
-purple_media_get_audio_src(PurpleMedia *media)
-{
-	GstElement *ret;
-	g_object_get(G_OBJECT(media), "audio-src", &ret, NULL);
-	return ret;
+	PurpleMediaSession *session = purple_media_get_session(media, sess_id);
+	if (session->sink)
+		gst_object_unref(session->sink);
+	session->sink = sink;
+	gst_bin_add(GST_BIN(purple_media_get_pipeline(media)),
+		    session->sink);
 }
 
 GstElement *
-purple_media_get_audio_sink(PurpleMedia *media)
+purple_media_get_src(PurpleMedia *media, const gchar *sess_id)
 {
-	GstElement *ret;
-	g_object_get(G_OBJECT(media), "audio-sink", &ret, NULL);
-	return ret;
-}
-
-GstElement *
-purple_media_get_video_src(PurpleMedia *media)
-{
-	GstElement *ret;
-	g_object_get(G_OBJECT(media), "video-src", &ret, NULL);
-	return ret;
+	return purple_media_get_session(media, sess_id)->src;
 }
 
 GstElement *
-purple_media_get_video_sink(PurpleMedia *media)
+purple_media_get_sink(PurpleMedia *media, const gchar *sess_id)
 {
-	GstElement *ret;
-	g_object_get(G_OBJECT(media), "video-sink", &ret, NULL);
-	return ret;
+	return purple_media_get_session(media, sess_id)->sink;
 }
 
 GstElement *
-purple_media_get_audio_pipeline(PurpleMedia *media)
+purple_media_get_pipeline(PurpleMedia *media)
 {
-	if (!media->priv->audio_pipeline) {
-		media->priv->audio_pipeline = gst_pipeline_new(media->priv->name);
-		gst_bin_add(GST_BIN(media->priv->audio_pipeline), GST_ELEMENT(media->priv->conference));
+	if (!media->priv->pipeline) {
+		media->priv->pipeline = gst_pipeline_new(media->priv->name);
+		gst_bin_add(GST_BIN(media->priv->pipeline), GST_ELEMENT(media->priv->conference));
 	}
 
-	return media->priv->audio_pipeline;
+	return media->priv->pipeline;
 }
 
 PurpleConnection *
@@ -672,7 +601,7 @@
 
 	purple_debug_info("media", "purple_media_audio_init_src\n");
 
-	*sendbin = gst_bin_new("sendbin");
+	*sendbin = gst_bin_new("purplesendaudiobin");
 	src = gst_element_factory_make("alsasrc", "asrc");
 	*sendlevel = gst_element_factory_make("level", "sendlevel");
 	gst_bin_add_many(GST_BIN(*sendbin), src, *sendlevel, NULL);
@@ -700,6 +629,46 @@
 }
 
 void
+purple_media_video_init_src(GstElement **sendbin)
+{
+	GstElement *src;
+	GstPad *pad;
+	GstPad *ghost;
+	const gchar *video_plugin = purple_prefs_get_string("/purple/media/video/plugin");
+	const gchar *video_device = purple_prefs_get_string("/purple/media/video/device");
+
+	purple_debug_info("media", "purple_media_video_init_src\n");
+
+	*sendbin = gst_bin_new("purplesendvideobin");
+	src = gst_element_factory_make(video_plugin, "videosrc");
+	gst_bin_add(GST_BIN(*sendbin), src);
+
+	if (!strcmp(video_plugin, "videotestsrc")) {
+		/* videotestsrc doesn't throttle its output unless is-live is set to TRUE */
+		g_object_set (G_OBJECT(src), "is-live", TRUE, NULL);
+	}
+	pad = gst_element_get_pad(src, "src");
+	ghost = gst_ghost_pad_new("ghostsrc", pad);
+	gst_element_add_pad(*sendbin, ghost);
+
+	/* set current video device on "src"... */
+	if (video_device) {
+		GList *devices = purple_media_get_devices(src);
+		GList *dev = devices;
+		purple_debug_info("media", "Setting device of GstElement src to %s\n",
+				video_device);
+		for (; dev ; dev = dev->next) {
+			GValue *device = (GValue *) dev->data;
+			char *name = purple_media_get_device_name(src, device);
+			if (strcmp(name, video_device) == 0) {
+				purple_media_element_set_device(src, device);
+			}
+			g_free(name);
+		}
+	}
+}
+
+void
 purple_media_audio_init_recv(GstElement **recvbin, GstElement **recvlevel)
 {
 	GstElement *sink;
@@ -707,7 +676,7 @@
 
 	purple_debug_info("media", "purple_media_audio_init_recv\n");
 
-	*recvbin = gst_bin_new("pidginrecvbin");
+	*recvbin = gst_bin_new("pidginrecvaudiobin");
 	sink = gst_element_factory_make("alsasink", "asink");
 	g_object_set(G_OBJECT(sink), "sync", FALSE, NULL);
 	*recvlevel = gst_element_factory_make("level", "recvlevel");
@@ -721,20 +690,54 @@
 	purple_debug_info("media", "purple_media_audio_init_recv end\n");
 }
 
+void
+purple_media_video_init_recv(GstElement **recvbin)
+{
+	GstElement *sink;
+	GstPad *pad, *ghost;
+
+	purple_debug_info("media", "purple_media_video_init_recv\n");
+
+	*recvbin = gst_bin_new("pidginrecvvideobin");
+	sink = gst_element_factory_make("autovideosink", "purplevideosink");
+	gst_bin_add(GST_BIN(*recvbin), sink);
+	pad = gst_element_get_pad(sink, "sink");
+	ghost = gst_ghost_pad_new("ghostsink", pad);
+	gst_element_add_pad(*recvbin, ghost);
+
+	purple_debug_info("media", "purple_media_video_init_recv end\n");
+}
+
 static void
 purple_media_new_local_candidate(FsStream *stream,
 				  FsCandidate *local_candidate,
-				  PurpleMedia *media)
+				  PurpleMediaSession *session)
 {
+	gchar *name;
+	FsParticipant *participant;
 	purple_debug_info("media", "got new local candidate: %s\n", local_candidate->candidate_id);
-	media->priv->local_candidates = g_list_append(media->priv->local_candidates, 
-						      fs_candidate_copy(local_candidate));
+	g_object_get(stream, "participant", &participant, NULL);
+	g_object_get(participant, "cname", &name, NULL);
+	g_object_unref(participant);
+
+	purple_media_insert_local_candidate(session, name, fs_candidate_copy(local_candidate));
+
+	g_signal_emit(session->media, purple_media_signals[NEW_CANDIDATE],
+		      0, session->id, name, fs_candidate_copy(local_candidate));
+
+	g_free(name);
 }
 
 static void
-purple_media_candidates_prepared(FsStream *stream, PurpleMedia *media)
+purple_media_candidates_prepared(FsStream *stream, PurpleMediaSession *session)
 {
-	g_signal_emit(media, purple_media_signals[CANDIDATES_PREPARED], 0);
+	gchar *name;
+	FsParticipant *participant;
+	g_object_get(stream, "participant", &participant, NULL);
+	g_object_get(participant, "cname", &name, NULL);
+	g_object_unref(participant);
+	g_signal_emit(session->media, purple_media_signals[CANDIDATES_PREPARED], 0);
+	g_free(name);
 }
 
 /* callback called when a pair of transport candidates (local and remote)
@@ -743,44 +746,46 @@
 purple_media_candidate_pair_established(FsStream *stream,
 					 FsCandidate *native_candidate,
 					 FsCandidate *remote_candidate,
-					 PurpleMedia *media)
+					 PurpleMediaSession *session)
 {
-	media->priv->local_candidate = fs_candidate_copy(native_candidate);
-	media->priv->remote_candidate = fs_candidate_copy(remote_candidate);
+	session->local_candidate = fs_candidate_copy(native_candidate);
+	session->remote_candidate = fs_candidate_copy(remote_candidate);
 
 	purple_debug_info("media", "candidate pair established\n");
-	g_signal_emit(media, purple_media_signals[CANDIDATE_PAIR], 0,
-		      media->priv->local_candidate,
-		      media->priv->remote_candidate);
+	g_signal_emit(session->media, purple_media_signals[CANDIDATE_PAIR], 0,
+		      session->local_candidate,
+		      session->remote_candidate);
 }
 
 static void
 purple_media_src_pad_added(FsStream *stream, GstPad *srcpad,
-			    FsCodec *codec, PurpleMedia *media)
+			    FsCodec *codec, PurpleMediaSession *session)
 {
-	GstElement *pipeline = purple_media_get_audio_pipeline(media);
-	GstPad *sinkpad = gst_element_get_static_pad(purple_media_get_audio_sink(media), "ghostsink");
+	GstElement *pipeline = purple_media_get_pipeline(session->media);
+	GstPad *sinkpad = gst_element_get_static_pad(session->sink, "ghostsink");
 	purple_debug_info("media", "connecting new src pad: %s\n", 
 			  gst_pad_link(srcpad, sinkpad) == GST_PAD_LINK_OK ? "success" : "failure");
 	gst_element_set_state(pipeline, GST_STATE_PLAYING);
 }
 
 static gboolean
-purple_media_add_stream_internal(PurpleMedia *media, FsSession **session, GList **streams,
-				 GstElement *src, const gchar *who, FsMediaType type,
-				 FsStreamDirection type_direction, const gchar *transmitter)
+purple_media_add_stream_internal(PurpleMedia *media, const gchar *sess_id,
+				 const gchar *who, FsMediaType type,
+				 FsStreamDirection type_direction,
+				 const gchar *transmitter)
 {
-	char *cname = NULL;
+	PurpleMediaSession *session = purple_media_get_session(media, sess_id);
 	FsParticipant *participant = NULL;
-	GList *l = NULL;
 	FsStream *stream = NULL;
-	FsParticipant *p = NULL;
 	FsStreamDirection *direction = NULL;
-	FsSession *s = NULL;
 
-	if (!*session) {
+	if (!session) {
 		GError *err = NULL;
-		*session = fs_conference_new_session(media->priv->conference, type, &err);
+		GList *codec_conf;
+
+		session = g_new0(PurpleMediaSession, 1);
+
+		session->session = fs_conference_new_session(media->priv->conference, type, &err);
 
 		if (err != NULL) {
 			purple_debug_error("media", "Error creating session: %s\n", err->message);
@@ -788,62 +793,64 @@
 			purple_conv_present_error(who,
 						  purple_connection_get_account(purple_media_get_connection(media)),
 						  _("Error creating session."));
+			g_free(session);
 			return FALSE;
 		}
 
-		if (src) {
-			GstPad *sinkpad;
-			GstPad *srcpad;
-			g_object_get(*session, "sink-pad", &sinkpad, NULL);
-			srcpad = gst_element_get_static_pad(src, "ghostsrc");
-			purple_debug_info("media", "connecting pad: %s\n", 
-					  gst_pad_link(srcpad, sinkpad) == GST_PAD_LINK_OK
-					  ? "success" : "failure");
-		}
-	}
-	
-	for (l = media->priv->participants; l != NULL; l = g_list_next(l)) {
-		g_object_get(l->data, "cname", cname, NULL);
-		if (!strcmp(cname, who)) {
-			g_free(cname);
-			participant = l->data;
-			break;
-		}
-		g_free(cname);
+		/*
+		 * None of these three worked for me.  THEORA is known not to
+		 * work as of at least Farsight2 0.0.2.
+		 */
+		codec_conf = g_list_prepend(NULL, fs_codec_new(FS_CODEC_ID_DISABLE,
+				"THEORA", FS_MEDIA_TYPE_VIDEO, 90000));
+		codec_conf = g_list_prepend(codec_conf, fs_codec_new(FS_CODEC_ID_DISABLE,
+				"MPV", FS_MEDIA_TYPE_VIDEO, 90000));
+		codec_conf = g_list_prepend(codec_conf, fs_codec_new(FS_CODEC_ID_DISABLE,
+				"H264", FS_MEDIA_TYPE_VIDEO, 90000));
+
+	/* XXX: SPEEX has a latency of 5 or 6 seconds for me */
+#if 0
+	/* SPEEX is added through the configuration */
+		codec_conf = g_list_prepend(codec_conf, fs_codec_new(FS_CODEC_ID_ANY,
+				"SPEEX", FS_MEDIA_TYPE_AUDIO, 8000));
+		codec_conf = g_list_prepend(codec_conf, fs_codec_new(FS_CODEC_ID_ANY,
+				"SPEEX", FS_MEDIA_TYPE_AUDIO, 16000));
+#endif
+
+		g_object_set(G_OBJECT(session->session), "local-codecs-config",
+			     codec_conf, NULL);
+
+		fs_codec_list_destroy(codec_conf);
+
+		session->id = g_strdup(sess_id);
+		session->media = media;
+		session->type = type;
+
+		purple_media_add_session(media, session);
 	}
 
-	if (!participant) {
-		participant = fs_conference_new_participant(media->priv->conference, (gchar*)who, NULL);
-		media->priv->participants = g_list_prepend(media->priv->participants, participant);
-	}
-	
-	for (l = *streams; l != NULL; l = g_list_next(l)) {
-		g_object_get(l->data, "participant", &p, "direction", &direction, "session", &s, NULL);
+	participant = purple_media_add_participant(media, who);
 
-		if (participant == p && *session == s) {
-			stream = l->data;
-			break;
-		}
-	}
+	stream = purple_media_session_get_stream(session, who);
 
 	if (!stream) {
-		stream = fs_session_new_stream(*session, participant, 
+		stream = fs_session_new_stream(session->session, participant, 
 					       type_direction, transmitter, 0, NULL, NULL);
-		*streams = g_list_prepend(*streams, stream);
+		purple_media_insert_stream(session, who, stream);
 		/* callback for new local candidate (new local candidate retreived) */
 		g_signal_connect(G_OBJECT(stream),
-				 "new-local-candidate", G_CALLBACK(purple_media_new_local_candidate), media);
+				 "new-local-candidate", G_CALLBACK(purple_media_new_local_candidate), session);
 		/* callback for source pad added (new stream source ready) */
 		g_signal_connect(G_OBJECT(stream),
-				 "src-pad-added", G_CALLBACK(purple_media_src_pad_added), media);
+				 "src-pad-added", G_CALLBACK(purple_media_src_pad_added), session);
 		/* callback for local candidates prepared (local candidates ready to send) */
 		g_signal_connect(G_OBJECT(stream), 
 				 "local-candidates-prepared", 
-				 G_CALLBACK(purple_media_candidates_prepared), media);
+				 G_CALLBACK(purple_media_candidates_prepared), session);
 		/* callback for new active candidate pair (established connection) */
 		g_signal_connect(G_OBJECT(stream),
 				 "new-active-candidate-pair", 
-				 G_CALLBACK(purple_media_candidate_pair_established), media);
+				 G_CALLBACK(purple_media_candidate_pair_established), session);
 	} else if (*direction != type_direction) {	
 		/* change direction */
 		g_object_set(stream, "direction", type_direction, NULL);
@@ -853,7 +860,7 @@
 }
 
 gboolean
-purple_media_add_stream(PurpleMedia *media, const gchar *who,
+purple_media_add_stream(PurpleMedia *media, const gchar *sess_id, const gchar *who,
 			PurpleMediaStreamType type,
 			const gchar *transmitter)
 {
@@ -869,9 +876,7 @@
 		else
 			type_direction = FS_DIRECTION_NONE;
 
-		if (!purple_media_add_stream_internal(media, &media->priv->audio_session,
-						      &media->priv->audio_streams,
-				 		      media->priv->audio_src, who,
+		if (!purple_media_add_stream_internal(media, sess_id, who,
 						      FS_MEDIA_TYPE_AUDIO, type_direction,
 						      transmitter)) {
 			return FALSE;
@@ -887,9 +892,7 @@
 		else
 			type_direction = FS_DIRECTION_NONE;
 
-		if (!purple_media_add_stream_internal(media, &media->priv->video_session,
-						      &media->priv->video_streams,
-				 		      media->priv->video_src, who,
+		if (!purple_media_add_stream_internal(media, sess_id, who,
 						      FS_MEDIA_TYPE_VIDEO, type_direction,
 						      transmitter)) {
 			return FALSE;
@@ -899,76 +902,74 @@
 }
 
 void
-purple_media_remove_stream(PurpleMedia *media, const gchar *who, PurpleMediaStreamType type)
+purple_media_remove_stream(PurpleMedia *media, const gchar *sess_id, const gchar *who)
 {
 	
 }
 
-static FsStream *
-purple_media_get_audio_stream(PurpleMedia *media, const gchar *name)
+PurpleMediaStreamType
+purple_media_get_session_type(PurpleMedia *media, const gchar *sess_id)
 {
-	GList *streams = media->priv->audio_streams;
-	for (; streams; streams = streams->next) {
-		FsParticipant *participant;
-		gchar *cname;
-		g_object_get(streams->data, "participant", &participant, NULL);
-		g_object_get(participant, "cname", &cname, NULL);
-
-		if (!strcmp(cname, name)) {
-			return streams->data;
-		}
-	}
-
-	return NULL;
+	PurpleMediaSession *session = purple_media_get_session(media, sess_id);
+	return session->type;
 }
 
 GList *
-purple_media_get_local_audio_codecs(PurpleMedia *media)
+purple_media_get_local_codecs(PurpleMedia *media, const gchar *sess_id)
 {
 	GList *codecs;
-	g_object_get(G_OBJECT(media->priv->audio_session), "local-codecs", &codecs, NULL);
+	g_object_get(G_OBJECT(purple_media_get_session(media, sess_id)->session),
+		     "local-codecs", &codecs, NULL);
 	return codecs;
 }
 
 GList *
-purple_media_get_local_audio_candidates(PurpleMedia *media)
+purple_media_get_local_candidates(PurpleMedia *media, const gchar *sess_id, const gchar *name)
 {
-	return media->priv->local_candidates;
+	PurpleMediaSession *session = purple_media_get_session(media, sess_id);
+	return purple_media_session_get_local_candidates(session, name);
 }
 
 GList *
-purple_media_get_negotiated_audio_codecs(PurpleMedia *media)
+purple_media_get_negotiated_codecs(PurpleMedia *media, const gchar *sess_id)
 {
+	PurpleMediaSession *session = purple_media_get_session(media, sess_id);
 	GList *codec_intersection;
-	g_object_get(media->priv->audio_session, "negotiated-codecs", &codec_intersection, NULL);
+	g_object_get(session->session, "negotiated-codecs", &codec_intersection, NULL);
 	return codec_intersection;
 }
 
 void
-purple_media_add_remote_audio_candidates(PurpleMedia *media, const gchar *name, GList *remote_candidates)
+purple_media_add_remote_candidates(PurpleMedia *media, const gchar *sess_id,
+				   const gchar *name, GList *remote_candidates)
 {
-	FsStream *stream = purple_media_get_audio_stream(media, name);
+	PurpleMediaSession *session = purple_media_get_session(media, sess_id);
+	FsStream *stream = purple_media_session_get_stream(session, name);
 	GList *candidates = remote_candidates;
 	for (; candidates; candidates = candidates->next)
 		fs_stream_add_remote_candidate(stream, candidates->data, NULL);
 }
 
 FsCandidate *
-purple_media_get_local_candidate(PurpleMedia *media)
+purple_media_get_local_candidate(PurpleMedia *media, const gchar *sess_id, const gchar *name)
 {
-	return media->priv->local_candidate;
+	PurpleMediaSession *session = purple_media_get_session(media, sess_id);
+	return session->local_candidate;
 }
 
 FsCandidate *
-purple_media_get_remote_candidate(PurpleMedia *media)
+purple_media_get_remote_candidate(PurpleMedia *media, const gchar *sess_id, const gchar *name)
 {
-	return media->priv->remote_candidate;
+	PurpleMediaSession *session = purple_media_get_session(media, sess_id);
+	return session->remote_candidate;
 }
 
 void
-purple_media_set_remote_audio_codecs(PurpleMedia *media, const gchar *name, GList *codecs)
+purple_media_set_remote_codecs(PurpleMedia *media, const gchar *sess_id, const gchar *name, GList *codecs)
 {
-	fs_stream_set_remote_codecs(purple_media_get_audio_stream(media, name), codecs, NULL);
+	PurpleMediaSession *session = purple_media_get_session(media, sess_id);
+	FsStream *stream = purple_media_session_get_stream(session, name);
+	fs_stream_set_remote_codecs(stream, codecs, NULL);
 }
 
 #endif  /* USE_VV */
--- a/libpurple/media.h	Wed Jun 04 19:21:49 2008 +0000
+++ b/libpurple/media.h	Fri Jun 06 07:43:03 2008 +0000
@@ -48,6 +48,7 @@
 typedef struct _PurpleMedia PurpleMedia;
 typedef struct _PurpleMediaClass PurpleMediaClass;
 typedef struct _PurpleMediaPrivate PurpleMediaPrivate;
+typedef struct _PurpleMediaSession PurpleMediaSession;
 
 typedef enum {
 	PURPLE_MEDIA_RECV_AUDIO = 1 << 0,
@@ -71,20 +72,18 @@
 
 GType purple_media_get_type(void);
 
+GList *purple_media_get_session_names(PurpleMedia *media);
+
 void purple_media_get_elements(PurpleMedia *media, GstElement **audio_src, GstElement **audio_sink,
 						  GstElement **video_src, GstElement **video_sink);
 
-void purple_media_set_audio_src(PurpleMedia *media, GstElement *video_src);
-void purple_media_set_audio_sink(PurpleMedia *media, GstElement *video_src);
-void purple_media_set_video_src(PurpleMedia *media, GstElement *video_src);
-void purple_media_set_video_sink(PurpleMedia *media, GstElement *video_src);
+void purple_media_set_src(PurpleMedia *media, const gchar *sess_id, GstElement *src);
+void purple_media_set_sink(PurpleMedia *media, const gchar *sess_id, GstElement *sink);
 
-GstElement *purple_media_get_audio_src(PurpleMedia *media);
-GstElement *purple_media_get_audio_sink(PurpleMedia *media);
-GstElement *purple_media_get_video_src(PurpleMedia *media);
-GstElement *purple_media_get_video_sink(PurpleMedia *media);
+GstElement *purple_media_get_src(PurpleMedia *media, const gchar *sess_id);
+GstElement *purple_media_get_sink(PurpleMedia *media, const gchar *sess_id);
 
-GstElement *purple_media_get_audio_pipeline(PurpleMedia *media);
+GstElement *purple_media_get_pipeline(PurpleMedia *media);
 
 PurpleConnection *purple_media_get_connection(PurpleMedia *media);
 const char *purple_media_get_screenname(PurpleMedia *media);
@@ -111,20 +110,23 @@
 void purple_media_video_init_src(GstElement **sendbin);
 
 void purple_media_audio_init_recv(GstElement **recvbin, GstElement **recvlevel);
+void purple_media_video_init_recv(GstElement **recvbin);
 
-gboolean purple_media_add_stream(PurpleMedia *media, const gchar *who,
+gboolean purple_media_add_stream(PurpleMedia *media, const gchar *sess_id, const gchar *who,
 			     PurpleMediaStreamType type, const gchar *transmitter);
-void purple_media_remove_stream(PurpleMedia *media, const gchar *who, PurpleMediaStreamType type);
+void purple_media_remove_stream(PurpleMedia *media, const gchar *sess_id, const gchar *who);
+
+PurpleMediaStreamType purple_media_get_session_type(PurpleMedia *media, const gchar *sess_id);
 
-GList *purple_media_get_local_audio_candidates(PurpleMedia *media);
-GList *purple_media_get_negotiated_audio_codecs(PurpleMedia *media);
+GList *purple_media_get_negotiated_codecs(PurpleMedia *media, const gchar *sess_id);
 
-GList *purple_media_get_local_audio_codecs(PurpleMedia *media);
-void purple_media_add_remote_audio_candidates(PurpleMedia *media, const gchar *name,	
-					       GList *remote_candidates);
-FsCandidate *purple_media_get_local_candidate(PurpleMedia *media);
-FsCandidate *purple_media_get_remote_candidate(PurpleMedia *media);
-void purple_media_set_remote_audio_codecs(PurpleMedia *media, const gchar *name, GList *codecs);
+GList *purple_media_get_local_codecs(PurpleMedia *media, const gchar *sess_id);
+void purple_media_add_remote_candidates(PurpleMedia *media, const gchar *sess_id,
+					const gchar *name, GList *remote_candidates);
+GList *purple_media_get_local_candidates(PurpleMedia *media, const gchar *sess_id, const gchar *name);
+FsCandidate *purple_media_get_local_candidate(PurpleMedia *media, const gchar *sess_id, const gchar *name);
+FsCandidate *purple_media_get_remote_candidate(PurpleMedia *media, const gchar *sess_id, const gchar *name);
+void purple_media_set_remote_codecs(PurpleMedia *media, const gchar *sess_id, const gchar *name, GList *codecs);
 
 G_END_DECLS
 
--- a/libpurple/protocols/jabber/google.c	Wed Jun 04 19:21:49 2008 +0000
+++ b/libpurple/protocols/jabber/google.c	Fri Jun 06 07:43:03 2008 +0000
@@ -102,7 +102,7 @@
 google_session_send_accept(GoogleSession *session)
 {
 	xmlnode *sess, *desc, *payload;
-	GList *codecs = purple_media_get_negotiated_audio_codecs(session->media);
+	GList *codecs = purple_media_get_negotiated_codecs(session->media, "google-voice");
 	JabberIq *iq = jabber_iq_new(session->js, JABBER_IQ_SET);
 
 	xmlnode_set_attrib(iq->node, "to", session->remote_jid);
@@ -124,7 +124,7 @@
 
 	fs_codec_list_destroy(codecs);
 	jabber_iq_send(iq);
-	gst_element_set_state(purple_media_get_audio_pipeline(session->media), GST_STATE_PLAYING);
+	gst_element_set_state(purple_media_get_pipeline(session->media), GST_STATE_PLAYING);
 }
 
 static void
@@ -160,7 +160,8 @@
 google_session_candidates_prepared (PurpleMedia *media, GoogleSession *session)
 {
 	JabberIq *iq = jabber_iq_new(session->js, JABBER_IQ_SET);
-	GList *candidates = purple_media_get_local_audio_candidates(session->media);
+	GList *candidates = purple_media_get_local_candidates(session->media, "google-voice",
+							      session->remote_jid);
 	FsCandidate *transport;
 	xmlnode *sess;
 	xmlnode *candidate;
@@ -217,7 +218,7 @@
 							   "fsrtpconference", session->remote_jid);
 
 	/* "rawudp" will need to be changed to "nice" when libnice is finished */
-	purple_media_add_stream(session->media, session->remote_jid, 
+	purple_media_add_stream(session->media, "google-voice", session->remote_jid, 
 				PURPLE_MEDIA_AUDIO, "rawudp");
 
 	desc_element = xmlnode_get_child(sess, "description");
@@ -234,7 +235,7 @@
 		codecs = g_list_append(codecs, codec);
 	}
 
-	purple_media_set_remote_audio_codecs(session->media, session->remote_jid, codecs);
+	purple_media_set_remote_codecs(session->media, "google-voice", session->remote_jid, codecs);
 
 	g_signal_connect_swapped(G_OBJECT(session->media), "accepted",
 				 G_CALLBACK(google_session_send_accept), session);
@@ -282,7 +283,7 @@
 		list = g_list_append(list, info);
 	}
 
-	purple_media_add_remote_audio_candidates(session->media, session->remote_jid, list);
+	purple_media_add_remote_candidates(session->media, "google-voice", session->remote_jid, list);
 	fs_candidate_list_destroy(list);
 
 	result = jabber_iq_new(js, JABBER_IQ_RESULT);
--- a/libpurple/protocols/jabber/jabber.c	Wed Jun 04 19:21:49 2008 +0000
+++ b/libpurple/protocols/jabber/jabber.c	Fri Jun 06 07:43:03 2008 +0000
@@ -2391,7 +2391,6 @@
 		purple_debug_error("jabber", "Could not find buddy\n");
 		return FALSE;
 	}
-#if 0	/* These can be added once we support video */
 	/* XMPP will only support two-way media, AFAIK... */
 	if (type == (PURPLE_MEDIA_AUDIO | PURPLE_MEDIA_VIDEO)) {
 		purple_debug_info("jabber", 
@@ -2399,21 +2398,17 @@
 		return (jabber_buddy_has_capability(jb, XEP_0167_CAP) ||
 				jabber_buddy_has_capability(jb, GTALK_CAP)) && 
 				jabber_buddy_has_capability(jb, XEP_0180_CAP);
-	} else 
-#endif
-	if (type == (PURPLE_MEDIA_AUDIO)) {
+	} else if (type == (PURPLE_MEDIA_AUDIO)) {
 		purple_debug_info("jabber", 
 				  "Checking audio XEP support for %s\n", who);
 		return jabber_buddy_has_capability(jb, XEP_0167_CAP) ||
 				jabber_buddy_has_capability(jb, GTALK_CAP);
-	}
-#if 0
-	 else if (type == (PURPLE_MEDIA_VIDEO)) {
+	} else if (type == (PURPLE_MEDIA_VIDEO)) {
 		purple_debug_info("jabber", 
 				  "Checking video XEP support for %s\n", who);
 		return jabber_buddy_has_capability(jb, XEP_0180_CAP);
 	}
-#endif
+
 	return FALSE;
 }
 
--- a/libpurple/protocols/jabber/jingle.c	Wed Jun 04 19:21:49 2008 +0000
+++ b/libpurple/protocols/jabber/jingle.c	Fri Jun 06 07:43:03 2008 +0000
@@ -278,24 +278,31 @@
 }
 
 static GList *
-jabber_jingle_get_codecs(const xmlnode *description)
+jabber_jingle_get_codecs(xmlnode *description)
 {
 	GList *codecs = NULL;
 	xmlnode *codec_element = NULL;
 	const char *encoding_name,*id, *clock_rate;
 	FsCodec *codec;
-	
+	FsMediaType type = !strcmp(xmlnode_get_namespace(description), JINGLE_VIDEO) ?
+			FS_MEDIA_TYPE_VIDEO : FS_MEDIA_TYPE_AUDIO;
+
 	for (codec_element = xmlnode_get_child(description, "payload-type") ;
 		 codec_element ;
 		 codec_element = xmlnode_get_next_twin(codec_element)) {
 		encoding_name = xmlnode_get_attrib(codec_element, "name");
+
 		id = xmlnode_get_attrib(codec_element, "id");
 		clock_rate = xmlnode_get_attrib(codec_element, "clockrate");
 
 		codec = fs_codec_new(atoi(id), encoding_name, 
-				     FS_MEDIA_TYPE_AUDIO, 
+				     type, 
 				     clock_rate ? atoi(clock_rate) : 0);
-		codecs = g_list_append(codecs, codec);		 
+		purple_debug_info("jingle", "codec: %i, %s, %s, %i\n", codec->id, 
+				codec->encoding_name, codec->media_type == FS_MEDIA_TYPE_AUDIO ?
+				"FS_MEDIA_TYPE_AUDIO" : codec->media_type == FS_MEDIA_TYPE_VIDEO ?
+				"FS_MEDIA_TYPE_VIDEO" : "FS_MEDIA_TYPE_NONE", codec->clock_rate);
+		codecs = g_list_append(codecs, codec);
 	}
 	return codecs;
 }
@@ -397,8 +404,9 @@
 {
 	JingleSession *session = jabber_jingle_session_content_get_session(jsc);
 	PurpleMedia *media = jabber_jingle_session_get_media(session);
-	/* change this to the generic function when PurpleMedia supports video */
-	GList *codecs = purple_media_get_local_audio_codecs(media);
+	/* should this be local_codecs or negotiated-codecs? */
+	GList *codecs = purple_media_get_local_codecs(media,
+			jabber_jingle_session_content_get_name(jsc));
 
 	for (; codecs ; codecs = codecs->next) {
 		FsCodec *codec = (FsCodec*)codecs->data;
@@ -407,8 +415,7 @@
 		
 		g_snprintf(id, sizeof(id), "%d", codec->id);
 		g_snprintf(clockrate, sizeof(clockrate), "%d", codec->clock_rate);
-		g_snprintf(channels, sizeof(channels), "%d",
-			   codec->channels == 0 ? 1 : codec->channels);
+		g_snprintf(channels, sizeof(channels), "%d", codec->channels);
 		
 		xmlnode_set_attrib(payload, "name", codec->encoding_name);
 		xmlnode_set_attrib(payload, "id", id);
@@ -569,35 +576,7 @@
 			   jabber_jingle_session_get_remote_jid(session));
 	return result;
 }
-
-static xmlnode *
-jabber_jingle_session_create_description(const JingleSession *sess)
-{
-    GList *codecs = purple_media_get_local_audio_codecs(sess->media);
-    xmlnode *description = xmlnode_new("description");
-
-	xmlnode_set_namespace(description, JINGLE_AUDIO);
-	
-	/* get codecs */
-	for (; codecs ; codecs = codecs->next) {
-		FsCodec *codec = (FsCodec*)codecs->data;
-		char id[8], clockrate[10], channels[10];
-		xmlnode *payload = xmlnode_new_child(description, "payload-type");
-		
-		g_snprintf(id, sizeof(id), "%d", codec->id);
-		g_snprintf(clockrate, sizeof(clockrate), "%d", codec->clock_rate);
-		g_snprintf(channels, sizeof(channels), "%d", codec->channels);
-		
-		xmlnode_set_attrib(payload, "name", codec->encoding_name);
-		xmlnode_set_attrib(payload, "id", id);
-		xmlnode_set_attrib(payload, "clockrate", clockrate);
-		xmlnode_set_attrib(payload, "channels", channels);
-    }
-    
-    fs_codec_list_destroy(codecs);
-    return description;
-}
-
+#if 0
 static xmlnode *
 jabber_jingle_session_create_content_accept(const JingleSession *sess)
 {
@@ -649,6 +628,7 @@
 	
 	return jingle;
 }
+#endif
 
 static JabberIq *
 jabber_jingle_session_create_session_accept(const JingleSession *session,
@@ -744,7 +724,7 @@
 	jabber_jingle_session_add_candidate_iceudp(transport, candidate, NULL);
 	return request;
 }
-
+#if 0
 static void
 jabber_jingle_session_send_content_accept(JingleSession *session)
 {
@@ -757,7 +737,7 @@
 	xmlnode_insert_child(result->node, jingle);
 	jabber_iq_send(result);
 }
-
+#endif
 static void
 jabber_jingle_session_send_session_accept(JingleSession *session)
 {
@@ -766,8 +746,10 @@
 	GList *contents = jabber_jingle_session_get_contents(session);
 	for (; contents; contents = contents->next) {
 		JingleSessionContent *jsc = contents->data;
-		GList *candidates = purple_media_get_local_audio_candidates(
-				jabber_jingle_session_get_media(session));
+		GList *candidates = purple_media_get_local_candidates(
+				media,
+				jabber_jingle_session_content_get_name(jsc),
+				jabber_jingle_session_get_remote_jid(session));
 		purple_debug_info("jabber",
 				  "jabber_session_candidates_prepared: %d candidates\n",
 				  g_list_length(candidates));
@@ -778,14 +760,21 @@
 			jabber_iq_send(result);
 		}
 		fs_candidate_list_destroy(candidates);
+		purple_debug_info("jingle", "codec intersection: %i\n",
+				g_list_length(purple_media_get_negotiated_codecs(media,
+				jabber_jingle_session_content_get_name(jsc))));
+		jabber_iq_send(jabber_jingle_session_create_session_accept(session, 
+				purple_media_get_local_candidate(media,
+					jabber_jingle_session_content_get_name(jsc),
+					jabber_jingle_session_get_remote_jid(session)),
+				purple_media_get_remote_candidate(media,
+					jabber_jingle_session_content_get_name(jsc),
+					jabber_jingle_session_get_remote_jid(session))));
 	}
 
-	jabber_iq_send(jabber_jingle_session_create_session_accept(session, 
-			purple_media_get_local_candidate(media),
-			purple_media_get_remote_candidate(media)));
 
 	purple_debug_info("jabber", "Sent session accept, starting stream\n");
-	gst_element_set_state(purple_media_get_audio_pipeline(session->media), GST_STATE_PLAYING);
+	gst_element_set_state(purple_media_get_pipeline(session->media), GST_STATE_PLAYING);
 
 	session->session_started = TRUE;
 }
@@ -822,7 +811,8 @@
 		jabber_jingle_session_content_create_internal(session,
 				"audio-content", "initiator", sender,
 				TRANSPORT_ICEUDP, JINGLE_AUDIO);
-	} else if (type & PURPLE_MEDIA_VIDEO) {
+	}
+	if (type & PURPLE_MEDIA_VIDEO) {
 		if (type == PURPLE_MEDIA_SEND_VIDEO)
 			strcpy(sender, "initiator");
 		else if (type == PURPLE_MEDIA_RECV_VIDEO)
@@ -837,9 +827,8 @@
 
 static void
 jabber_jingle_session_content_create_parse(JingleSession *session,
-					   xmlnode *jingle)
+					   xmlnode *content)
 {
-	xmlnode *content = xmlnode_get_child(jingle, "content");
 	xmlnode *description = xmlnode_get_child(content, "description");
 	xmlnode *transport = xmlnode_get_child(content, "transport");
 
@@ -907,6 +896,7 @@
 					      const char *remote_jid)
 {
 	PurpleMedia *media = NULL;
+	GList *contents = jabber_jingle_session_get_contents(session);
 
 	media = purple_media_manager_create_media(purple_media_manager_get(), 
 						  session->js->gc, "fsrtpconference", remote_jid);
@@ -916,12 +906,29 @@
 		return FALSE;
 	}
 
-	/* this will need to be changed to "nice" once the libnice transmitter is finished */
-	if (!purple_media_add_stream(media, remote_jid, PURPLE_MEDIA_AUDIO, "rawudp")) {
-		purple_debug_error("jabber", "Couldn't create audio stream\n");
-		purple_media_reject(media);
-		return FALSE;
+	for (; contents; contents = contents->next) {
+		JingleSessionContent *jsc = contents->data;
+		gboolean result = FALSE;
+
+		/* these will need to be changed to "nice" once the libnice transmitter is finished */
+		if (jabber_jingle_session_content_is_type(jsc, JINGLE_AUDIO)) {
+			result = purple_media_add_stream(media, "audio-content", remote_jid,
+							 PURPLE_MEDIA_AUDIO, "rawudp");
+			purple_debug_info("jingle", "Created Jingle audio session\n");
+		}
+		else if (jabber_jingle_session_content_is_type(jsc, JINGLE_VIDEO)) {
+			result = purple_media_add_stream(media, "video-content", remote_jid,
+							 PURPLE_MEDIA_VIDEO, "rawudp");
+			purple_debug_info("jingle", "Created Jingle video session\n");
+		}
+
+		if (!result) {
+			purple_debug_error("jabber", "Couldn't create stream\n");
+			purple_media_reject(media);
+			return FALSE;
+		}
 	}
+	g_list_free(contents);
 
 	jabber_jingle_session_set_remote_jid(session, remote_jid);
 	jabber_jingle_session_set_initiator(session, initiator);
@@ -967,8 +974,10 @@
 	contents = jabber_jingle_session_get_contents(session);
 	for (; contents; contents = contents->next) {
 		JingleSessionContent *jsc = contents->data;
-		GList *candidates = purple_media_get_local_audio_candidates(
-				jabber_jingle_session_get_media(session));
+		GList *candidates = purple_media_get_local_candidates(
+				jabber_jingle_session_get_media(session),
+				jabber_jingle_session_content_get_name(jsc),
+				jabber_jingle_session_get_remote_jid(session));
 		purple_debug_info("jabber",
 				  "jabber_session_candidates_prepared: %d candidates\n",
 				  g_list_length(candidates));
@@ -1012,6 +1021,8 @@
 	}
 	
 	session = jabber_jingle_session_create(js);
+	jabber_jingle_session_content_create_media(session, type);
+
 	/* set ourselves as initiator */
 	me = g_strdup_printf("%s@%s/%s", js->user->node, js->user->domain, js->user->resource);
 
@@ -1025,8 +1036,6 @@
 	g_free(jid);
 	g_free(me);
 
-	jabber_jingle_session_content_create_media(session, type);
-
 	/* create request */
 	request = jabber_jingle_session_create_session_initiate(session);
 	jabber_iq_set_callback(request, jabber_jingle_session_initiate_result_cb, NULL);
@@ -1064,6 +1073,7 @@
 void
 jabber_jingle_session_handle_content_replace(JabberStream *js, xmlnode *packet)
 {
+#if 0
 	xmlnode *jingle = xmlnode_get_child(packet, "jingle");
 	const char *sid = xmlnode_get_attrib(jingle, "sid");
 	JingleSession *session = jabber_jingle_session_find_by_id(js, sid);
@@ -1086,6 +1096,7 @@
 
 		jabber_iq_send(accept);
 	}
+#endif
 }
 
 void
@@ -1110,60 +1121,67 @@
 			   jabber_jingle_session_get_remote_jid(session));
 	jabber_iq_set_id(result, xmlnode_get_attrib(packet, "id"));
 
-	description = xmlnode_get_child(content, "description");
-	transport = xmlnode_get_child(content, "transport");
+	for (content = xmlnode_get_child(jingle, "content"); content;
+			content = xmlnode_get_next_twin(content)) {
+		description = xmlnode_get_child(content, "description");
+		transport = xmlnode_get_child(content, "transport");
 
-	/* fetch codecs from remote party */
-	purple_debug_info("jabber", "get codecs from session-accept\n");
-	remote_codecs = jabber_jingle_get_codecs(description);
-	purple_debug_info("jabber", "get transport candidates from session accept\n");
-	remote_transports = jabber_jingle_get_candidates(transport);
+		/* fetch codecs from remote party */
+		purple_debug_info("jabber", "get codecs from session-accept\n");
+		remote_codecs = jabber_jingle_get_codecs(description);
+		purple_debug_info("jabber", "get transport candidates from session accept\n");
+		remote_transports = jabber_jingle_get_candidates(transport);
 
-	purple_debug_info("jabber", "Got %d codecs from responder\n",
-			  g_list_length(remote_codecs));
-	purple_debug_info("jabber", "Got %d transport candidates from responder\n",
-			  g_list_length(remote_transports));
+		purple_debug_info("jabber", "Got %d codecs from responder\n",
+				  g_list_length(remote_codecs));
+		purple_debug_info("jabber", "Got %d transport candidates from responder\n",
+				  g_list_length(remote_transports));
 
-	purple_debug_info("jabber", "Setting remote codecs on stream\n");
+		purple_debug_info("jabber", "Setting remote codecs on stream\n");
 
-	purple_media_set_remote_audio_codecs(session->media, 
-					     jabber_jingle_session_get_remote_jid(session),
-					     remote_codecs);
+		purple_media_set_remote_codecs(session->media,
+					       xmlnode_get_attrib(content, "name"),
+					       jabber_jingle_session_get_remote_jid(session),
+					       remote_codecs);
 
-	codec_intersection = purple_media_get_negotiated_audio_codecs(session->media);
-	purple_debug_info("jabber", "codec_intersection contains %d elems\n",
-			  g_list_length(codec_intersection));
-	/* get the top codec */
-	if (g_list_length(codec_intersection) > 0) {
-		top = (FsCodec *) codec_intersection->data;
-		purple_debug_info("jabber", "Found a suitable codec on stream = %d\n",
-				  top->id);
+		codec_intersection = purple_media_get_negotiated_codecs(session->media,
+									xmlnode_get_attrib(content, "name"));
+		purple_debug_info("jabber", "codec_intersection contains %d elems\n",
+				  g_list_length(codec_intersection));
+		/* get the top codec */
+		if (g_list_length(codec_intersection) > 0) {
+			top = (FsCodec *) codec_intersection->data;
+			purple_debug_info("jabber", "Found a suitable codec on stream = %d\n",
+					  top->id);
 
-		/* we have found a suitable codec, but we will not start the stream
-		   just yet, wait for transport negotiation to complete... */
-	}
-	/* if we also got transport candidates, add them to our streams
-	   list of known remote candidates */
-	if (g_list_length(remote_transports) > 0) {
-		purple_media_add_remote_audio_candidates(session->media,
-							 jabber_jingle_session_get_remote_jid(session),
-							 remote_transports);
-		fs_candidate_list_destroy(remote_transports);
-	}
-	if (g_list_length(codec_intersection) == 0 &&
-			g_list_length(remote_transports)) {
-		/* we didn't get any candidates and the codec intersection is empty,
-		   this means this was not a content-accept message and we couldn't
-		   find any suitable codecs, should return error and hang up */
+			/* we have found a suitable codec, but we will not start the stream
+			   just yet, wait for transport negotiation to complete... */
+		}
+		/* if we also got transport candidates, add them to our streams
+		   list of known remote candidates */
+		if (g_list_length(remote_transports) > 0) {
+			purple_media_add_remote_candidates(session->media,
+							   xmlnode_get_attrib(content, "name"),
+							   jabber_jingle_session_get_remote_jid(session),
+							   remote_transports);
+			fs_candidate_list_destroy(remote_transports);
+		}
+		if (g_list_length(codec_intersection) == 0 &&
+				g_list_length(remote_transports)) {
+			/* we didn't get any candidates and the codec intersection is empty,
+			   this means this was not a content-accept message and we couldn't
+			   find any suitable codecs, should return error and hang up */
+
+		}
+
+		fs_codec_list_destroy(codec_intersection);
 
 	}
 
-	g_list_free(codec_intersection);
-
 	if (!strcmp(action, "session-accept")) {
 		purple_media_got_accept(jabber_jingle_session_get_media(session));
 		purple_debug_info("jabber", "Got session-accept, starting stream\n");
-		gst_element_set_state(purple_media_get_audio_pipeline(session->media),
+		gst_element_set_state(purple_media_get_pipeline(session->media),
 				      GST_STATE_PLAYING);
 	}
 
@@ -1204,30 +1222,35 @@
 		purple_debug_error("jabber", "Jingle session with id={%s} already exists\n", sid);
 		return;
 	}
+
 	session = jabber_jingle_session_create_by_id(js, sid);
 
-	/* init media */
-	content = xmlnode_get_child(jingle, "content");
-	if (!content) {
-		purple_debug_error("jabber", "jingle tag must contain content tag\n");
-		/* should send error here */
-		return;
-	}
+	/* init media; the jingle tag must contain at least one content tag */
+	content = xmlnode_get_child(jingle, "content");
+	if (!content) {
+		purple_debug_error("jabber", "jingle tag must contain content tag\n");
+		/* should send error here */
+		return;
+	}
+	for (; content; content = xmlnode_get_next_twin(content)) {
 
-	description = xmlnode_get_child(content, "description");
+		description = xmlnode_get_child(content, "description");
 
-	if (!description) {
-		purple_debug_error("jabber", "content tag must contain description tag\n");
-		/* we should create an error iq here */
-		return;
-	}
+		if (!description) {
+			purple_debug_error("jabber", "content tag must contain description tag\n");
+			/* we should create an error iq here */
+			return;
+		}
+
+		transport = xmlnode_get_child(content, "transport");
 
-	transport = xmlnode_get_child(content, "transport");
+		if (!transport) {
+			purple_debug_error("jingle", "content tag must contain transport tag\n");
+			/* we should create an error iq here */
+			return;
+		}
 
-	if (!transport) {
-		purple_debug_error("jingle", "content tag must contain transport tag\n");
-		/* we should create an error iq here */
-		return;
+		jabber_jingle_session_content_create_parse(session, content);
 	}
 
 	if (!jabber_jingle_session_initiate_media_internal(session, initiator, initiator)) {
@@ -1237,12 +1260,31 @@
 		return;
 	}
 
-	jabber_jingle_session_content_create_parse(session, jingle);
+	/* set the remote codecs for each content; the jingle tag was already
+	   validated above to contain at least one content tag */
+	for (content = xmlnode_get_child(jingle, "content"); content;
+			content = xmlnode_get_next_twin(content)) {
+
+		description = xmlnode_get_child(content, "description");
 
-	codecs = jabber_jingle_get_codecs(description);
+		if (!description) {
+			purple_debug_error("jabber", "content tag must contain description tag\n");
+			/* we should create an error iq here */
+			return;
+		}
+		codecs = jabber_jingle_get_codecs(description);
 
-	purple_media_set_remote_audio_codecs(session->media, initiator, codecs);
-
+		purple_media_set_remote_codecs(session->media,
+					       xmlnode_get_attrib(content, "name"),
+					       initiator, codecs);
+		purple_debug_info("jingle", "codec intersection: %i\n",
+				g_list_length(purple_media_get_negotiated_codecs(session->media,
+				xmlnode_get_attrib(content, "name"))));
+	}
 	jabber_iq_send(jabber_jingle_session_create_ack(js, packet));
 	jabber_iq_send(jabber_jingle_session_create_session_info(session, "ringing"));
 }
@@ -1261,7 +1303,7 @@
 
 	/* maybe we should look at the reasoncode to determine if it was
 	   a hangup or a reject, and call different callbacks to purple_media */
-	gst_element_set_state(purple_media_get_audio_pipeline(session->media), GST_STATE_NULL);
+	gst_element_set_state(purple_media_get_pipeline(session->media), GST_STATE_NULL);
 
 	purple_media_got_hangup(jabber_jingle_session_get_media(session));
 	jabber_iq_send(jabber_jingle_session_create_ack(js, packet));
@@ -1289,9 +1331,10 @@
 
 	/* add candidates to our list of remote candidates */
 	if (g_list_length(remote_candidates) > 0) {
-		purple_media_add_remote_audio_candidates(session->media,
-							 xmlnode_get_attrib(packet, "from"),
-							 remote_candidates);
+		purple_media_add_remote_candidates(session->media,
+						   xmlnode_get_attrib(content, "name"),
+						   xmlnode_get_attrib(packet, "from"),
+						   remote_candidates);
 		fs_candidate_list_destroy(remote_candidates);
 	}
 }
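The transport-info handling above now routes candidates by the <content> element's "name" attribute. A minimal sketch of that per-content pattern, using only the calls shown in this changeset; the wrapper function and its parameter names are hypothetical:

/* Hypothetical wrapper: hand a parsed candidate list to the stream that
 * belongs to this <content> element, then free the list. */
static void
add_candidates_for_content(PurpleMedia *media, xmlnode *content,
			   const char *remote_jid, GList *remote_candidates)
{
	const char *name = xmlnode_get_attrib(content, "name");

	if (name == NULL || remote_candidates == NULL)
		return;

	purple_media_add_remote_candidates(media, name, remote_jid,
					   remote_candidates);
	fs_candidate_list_destroy(remote_candidates);
}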
--- a/pidgin/gtkconv.c	Wed Jun 04 19:21:49 2008 +0000
+++ b/pidgin/gtkconv.c	Fri Jun 06 07:43:03 2008 +0000
@@ -7701,7 +7701,7 @@
 	PurpleMedia *media =
 		serv_initiate_media(gc,
 				    purple_conversation_get_name(conv),
-				    PURPLE_MEDIA_AUDIO & PURPLE_MEDIA_VIDEO);
+				    PURPLE_MEDIA_AUDIO | PURPLE_MEDIA_VIDEO);
 
 	purple_media_wait(media);
 }
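The & to | change above matters because the media types are bit flags: AND-ing two distinct flags yields zero, so the old code requested no media at all. A standalone illustration, assuming only that the two constants are distinct single-bit flags (the enum values below are for illustration, not libpurple's actual definitions):

/* Sketch: combining flag bits needs bitwise OR, not AND. */
#include <stdio.h>

enum {
	MEDIA_AUDIO = 1 << 0,
	MEDIA_VIDEO = 1 << 1
};

int main(void)
{
	printf("%d\n", MEDIA_AUDIO & MEDIA_VIDEO); /* 0: requests nothing     */
	printf("%d\n", MEDIA_AUDIO | MEDIA_VIDEO); /* 3: requests audio+video */
	return 0;
}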
@@ -7709,19 +7709,10 @@
 static void
 pidgin_conv_new_media_cb(PurpleMediaManager *manager, PurpleMedia *media, gpointer nul)
 {
-	GstElement *sendbin, *sendlevel;
-	GstElement *recvbin, *recvlevel;
-
 	GtkWidget *gtkmedia;
 	PurpleConversation *conv;
 	PidginConversation *gtkconv;
 
-	purple_media_audio_init_src(&sendbin, &sendlevel);
-	purple_media_audio_init_recv(&recvbin, &recvlevel);
-
-	purple_media_set_audio_src(media, sendbin);
-	purple_media_set_audio_sink(media, recvbin);
-
 	conv = purple_conversation_new(PURPLE_CONV_TYPE_IM,
 				       purple_connection_get_account(purple_media_get_connection(media)),
 				       purple_media_get_screenname(media));
@@ -7729,7 +7720,7 @@
 	if (gtkconv->gtkmedia)
 		gtk_widget_destroy(gtkconv->gtkmedia);
 
-	gtkmedia = pidgin_media_new(media, sendlevel, recvlevel);
+	gtkmedia = pidgin_media_new(media);
 	gtk_box_pack_start(GTK_BOX(gtkconv->topvbox), gtkmedia, FALSE, FALSE, 0);
 	gtk_widget_show(gtkmedia);
 	g_signal_connect(G_OBJECT(gtkmedia), "message", G_CALLBACK(pidgin_gtkmedia_message_cb), conv);
--- a/pidgin/gtkmedia.c	Wed Jun 04 19:21:49 2008 +0000
+++ b/pidgin/gtkmedia.c	Fri Jun 06 07:43:03 2008 +0000
@@ -133,13 +133,13 @@
 			"Send level",
 			"The GstElement of this media's send 'level'",
 			GST_TYPE_ELEMENT,
-			G_PARAM_CONSTRUCT_ONLY | G_PARAM_READWRITE));
+			G_PARAM_READWRITE));
 	g_object_class_install_property(gobject_class, PROP_RECV_LEVEL,
 			g_param_spec_object("recv-level",
 			"Receive level",
 			"The GstElement of this media's recv 'level'",
 			GST_TYPE_ELEMENT,
-			G_PARAM_CONSTRUCT_ONLY | G_PARAM_READWRITE));
+			G_PARAM_READWRITE));
 
 	pidgin_media_signals[MESSAGE] = g_signal_new("message", G_TYPE_FROM_CLASS(klass),
 					G_SIGNAL_RUN_LAST, 0, NULL, NULL,
@@ -224,7 +224,7 @@
 static void
 pidgin_media_disconnect_levels(PurpleMedia *media, PidginMedia *gtkmedia)
 {
-	GstElement *element = purple_media_get_audio_pipeline(media);
+	GstElement *element = purple_media_get_pipeline(media);
 	gulong handler_id = g_signal_handler_find(G_OBJECT(gst_pipeline_get_bus(GST_PIPELINE(element))),
 						  G_SIGNAL_MATCH_FUNC | G_SIGNAL_MATCH_DATA, 0, 0, 
 						  NULL, G_CALLBACK(level_message_cb), gtkmedia);
@@ -256,10 +256,40 @@
 static void
 pidgin_media_ready_cb(PurpleMedia *media, PidginMedia *gtkmedia)
 {
-	GstElement *element = purple_media_get_audio_pipeline(media);
+	GstElement *element = purple_media_get_pipeline(media);
+
+	GstElement *audiosendbin, *audiosendlevel;
+	GstElement *audiorecvbin, *audiorecvlevel;
+	GstElement *videosendbin;
+	GstElement *videorecvbin;
+
+	GList *sessions = purple_media_get_session_names(media);
+
+	purple_media_audio_init_src(&audiosendbin, &audiosendlevel);
+	purple_media_audio_init_recv(&audiorecvbin, &audiorecvlevel);
+
+	purple_media_video_init_src(&videosendbin);
+	purple_media_video_init_recv(&videorecvbin);
+
+	for (; sessions; sessions = sessions->next) {
+		if (purple_media_get_session_type(media, sessions->data) == FS_MEDIA_TYPE_AUDIO) {
+			purple_media_set_src(media, sessions->data, audiosendbin);
+			purple_media_set_sink(media, sessions->data, audiorecvbin);
+		} else if (purple_media_get_session_type(media, sessions->data) == FS_MEDIA_TYPE_VIDEO) {
+			purple_media_set_src(media, sessions->data, videosendbin);
+			purple_media_set_sink(media, sessions->data, videorecvbin);
+		}
+	}
+	g_list_free(sessions);
+
+	if (audiosendlevel && audiorecvlevel) {
+		g_object_set(gtkmedia, "send-level", audiosendlevel,
+				       "recv-level", audiorecvlevel,
+				       NULL);
+	}
+
 	gst_bus_add_signal_watch(GST_BUS(gst_pipeline_get_bus(GST_PIPELINE(element))));
 	g_signal_connect(G_OBJECT(gst_pipeline_get_bus(GST_PIPELINE(element))), "message", G_CALLBACK(level_message_cb), gtkmedia);
-	printf("\n\nbus: %p\n", gst_pipeline_get_bus(GST_PIPELINE(element)));
 }
 
 static void
@@ -376,11 +406,10 @@
 }
 
 GtkWidget *
-pidgin_media_new(PurpleMedia *media, GstElement *sendlevel, GstElement *recvlevel)
+pidgin_media_new(PurpleMedia *media)
 {
-	PidginMedia *gtkmedia = g_object_new(pidgin_media_get_type(), "media", media,
-			"send-level", sendlevel,
-			"recv-level", recvlevel, NULL);
+	PidginMedia *gtkmedia = g_object_new(pidgin_media_get_type(),
+					     "media", media, NULL);
 	return GTK_WIDGET(gtkmedia);
 }
 
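The pidgin_media_ready_cb() hunk above dispatches src/sink bins per session by media type. A minimal sketch of that dispatch factored into a helper, reusing only the per-session accessors introduced in this changeset; the helper name is hypothetical:

/* Hypothetical helper: attach one src/sink pair to every session of the
 * given type. */
static void
attach_bins_for_type(PurpleMedia *media, FsMediaType type,
		     GstElement *src, GstElement *sink)
{
	GList *sessions = purple_media_get_session_names(media);
	GList *iter;

	for (iter = sessions; iter; iter = iter->next) {
		if (purple_media_get_session_type(media, iter->data) != type)
			continue;
		purple_media_set_src(media, iter->data, src);
		purple_media_set_sink(media, iter->data, sink);
	}

	g_list_free(sessions);
}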
--- a/pidgin/gtkmedia.h	Wed Jun 04 19:21:49 2008 +0000
+++ b/pidgin/gtkmedia.h	Fri Jun 06 07:43:03 2008 +0000
@@ -59,7 +59,7 @@
 
 GType pidgin_media_get_type(void);
 
-GtkWidget *pidgin_media_new(PurpleMedia *media, GstElement *send_level, GstElement *recv_level);
+GtkWidget *pidgin_media_new(PurpleMedia *media);
 
 G_END_DECLS
 
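With the level elements dropped from pidgin_media_new() and the "send-level"/"recv-level" properties no longer construct-only, callers can attach levels after the widget exists. A minimal sketch under those assumptions; the wrapper and its arguments are illustrative:

/* Hypothetical wrapper: create the media widget first, then set the
 * level elements through the now read/write properties. */
static GtkWidget *
create_media_widget(PurpleMedia *media,
		    GstElement *send_level, GstElement *recv_level)
{
	GtkWidget *gtkmedia = pidgin_media_new(media);

	g_object_set(G_OBJECT(gtkmedia),
		     "send-level", send_level,
		     "recv-level", recv_level,
		     NULL);

	return gtkmedia;
}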
--- a/pidgin/gtkprefs.c	Wed Jun 04 19:21:49 2008 +0000
+++ b/pidgin/gtkprefs.c	Fri Jun 06 07:43:03 2008 +0000
@@ -2198,6 +2198,7 @@
 				   _("Default"), "gconfvideosrc",
 				   _("Video4Linux"), "v4lsrc",
 				   _("Video4Linux2"), "v4l2src",
+				   _("Video Test Source"), "videotestsrc",
 				   NULL);
 
 	gtk_size_group_add_widget(sg, dd);
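The preference above only adds "videotestsrc" to the dropdown; whether it works still depends on the element being available in the local GStreamer install. A small standalone check, using only standard GStreamer calls (a sketch, not part of this changeset):

/* Sketch: verify that the "videotestsrc" element can be created. */
#include <gst/gst.h>

int main(int argc, char *argv[])
{
	GstElement *src;

	gst_init(&argc, &argv);

	src = gst_element_factory_make("videotestsrc", NULL);
	if (src == NULL) {
		g_printerr("videotestsrc is not available\n");
		return 1;
	}

	gst_object_unref(src);
	return 0;
}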