docs: Convert gtkdoc comments to markdown
Modernizing the documentation, making it simpler to read and
modify, and allowing us to possibly switch to hotdoc in the
future.
Thibault Saunier committed Mar 10, 2017
1 parent a122135 commit 099ac9f
Showing 143 changed files with 1,214 additions and 1,706 deletions.
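
In the files shown here the conversion follows the same pattern: a @title: line is added to each SECTION comment, the DocBook <refsect2>/<title> wrappers around the examples are typically replaced by a markdown ## heading, and prose that previously shared a line with the closing ]| marker moves onto its own line. A minimal before/after sketch of that pattern (hypothetical element name, not taken from any file in this commit):

 Before (gtkdoc/DocBook style):

 /**
  * SECTION:element-someelement
  * @see_also: #GstSomeOtherElement
  *
  * <refsect2>
  * <title>Example launch line</title>
  * |[
  * gst-launch-1.0 -v videotestsrc ! someelement ! autovideosink
  * ]| Short description of what the pipeline does.
  * </refsect2>
  */

 After (markdown style):

 /**
  * SECTION:element-someelement
  * @title: someelement
  * @see_also: #GstSomeOtherElement
  *
  * ## Example launch line
  * |[
  * gst-launch-1.0 -v videotestsrc ! someelement ! autovideosink
  * ]|
  * Short description of what the pipeline does.
  */
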
9 changes: 5 additions & 4 deletions ext/alsa/gstalsamidisrc.c
@@ -20,6 +20,7 @@
*/
/**
* SECTION:element-alsamidisrc
* @title: alsamidisrc
* @see_also: #GstPushSrc
*
* The alsamidisrc element is an element that fetches ALSA MIDI sequencer
@@ -28,13 +29,13 @@
*
* It can be used to generate notes from a MIDI input device.
*
* <refsect2>
* <title>Example launch line</title>
* ## Example launch line
* |[
* gst-launch -v alsamidisrc ports=129:0 ! fluiddec ! audioconvert ! autoaudiosink
* ]| This pipeline will listen for events from the sequencer device at port 129:0,
* ]|
* This pipeline will listen for events from the sequencer device at port 129:0,
* and generate notes using the fluiddec element.
* </refsect2>
*
*/

#ifdef HAVE_CONFIG_H
10 changes: 6 additions & 4 deletions ext/alsa/gstalsasink.c
@@ -22,16 +22,18 @@

/**
* SECTION:element-alsasink
* @title: alsasink
* @see_also: alsasrc
*
* This element renders audio samples using the ALSA audio API.
*
* <refsect2>
* <title>Example pipelines</title>
* ## Example pipelines
* |[
* gst-launch-1.0 -v uridecodebin uri=file:///path/to/audio.ogg ! audioconvert ! audioresample ! autoaudiosink
* ]| Play an Ogg/Vorbis file and output audio via ALSA.
* </refsect2>
* ]|
*
* Play an Ogg/Vorbis file and output audio via ALSA.
*
*/

#ifdef HAVE_CONFIG_H
9 changes: 5 additions & 4 deletions ext/alsa/gstalsasrc.c
@@ -21,16 +21,17 @@

/**
* SECTION:element-alsasrc
* @title: alsasrc
* @see_also: alsasink
*
* This element reads data from an audio card using the ALSA API.
*
* <refsect2>
* <title>Example pipelines</title>
* ## Example pipelines
* |[
* gst-launch-1.0 -v alsasrc ! queue ! audioconvert ! vorbisenc ! oggmux ! filesink location=alsasrc.ogg
* ]| Record from a sound card using ALSA and encode to Ogg/Vorbis.
* </refsect2>
* ]|
* Record from a sound card using ALSA and encode to Ogg/Vorbis.
*
*/

#ifdef HAVE_CONFIG_H
9 changes: 5 additions & 4 deletions ext/ogg/gstoggdemux.c
@@ -21,16 +21,17 @@

/**
* SECTION:element-oggdemux
* @title: oggdemux
* @see_also: <link linkend="gst-plugins-base-plugins-oggmux">oggmux</link>
*
* This element demuxes ogg files into their encoded audio and video components.
*
* <refsect2>
* <title>Example pipelines</title>
* ## Example pipelines
* |[
* gst-launch-1.0 -v filesrc location=test.ogg ! oggdemux ! vorbisdec ! audioconvert ! audioresample ! autoaudiosink
* ]| Decodes a vorbis audio stream stored inside an ogg container and plays it.
* </refsect2>
* ]|
* Decodes a vorbis audio stream stored inside an ogg container and plays it.
*
*/


19 changes: 10 additions & 9 deletions ext/ogg/gstoggmux.c
@@ -20,17 +20,18 @@

/**
* SECTION:element-oggmux
* @title: oggmux
* @see_also: <link linkend="gst-plugins-base-plugins-oggdemux">oggdemux</link>
*
* This element merges streams (audio and video) into ogg files.
*
* <refsect2>
* <title>Example pipelines</title>
* ## Example pipelines
* |[
* gst-launch-1.0 v4l2src num-buffers=500 ! video/x-raw,width=320,height=240 ! videoconvert ! videorate ! theoraenc ! oggmux ! filesink location=video.ogg
* ]| Encodes a video stream captured from a v4l2-compatible camera to Ogg/Theora
* ]|
* Encodes a video stream captured from a v4l2-compatible camera to Ogg/Theora
* (the encoding will stop automatically after 500 frames)
* </refsect2>
*
*/

#ifdef HAVE_CONFIG_H
@@ -968,14 +969,14 @@ gst_ogg_mux_decorate_buffer (GstOggMux * ogg_mux, GstOggPadData * pad,


/* make sure at least one buffer is queued on all pads, two if possible
*
*
* if pad->buffer == NULL, pad->next_buffer != NULL, then
* we do not know if the buffer is the last or not
* if pad->buffer != NULL, pad->next_buffer != NULL, then
* pad->buffer is not the last buffer for the pad
* if pad->buffer != NULL, pad->next_buffer == NULL, then
* pad->buffer if the last buffer for the pad
*
*
* returns a pointer to an oggpad that holds the best buffer, or
* NULL when no pad was usable. "best" means the buffer marked
* with the lowest timestamp. If best->buffer == NULL then either
@@ -1409,7 +1410,7 @@ gst_ogg_mux_make_fistail (GstOggMux * mux, ogg_stream_state * os)
* page that allows decoders to identify the type of the stream.
* After that we need to write out all extra info for the decoders.
* In the case of a codec that also needs data as configuration, we can
* find that info in the streamcaps.
* find that info in the streamcaps.
* After writing the headers we must start a new page for the data.
*/
static GstFlowReturn
@@ -2034,11 +2035,11 @@ gst_ogg_mux_send_start_events (GstOggMux * ogg_mux, GstCollectPads * pads)
}

/* This function is called when there is data on all pads.
*
*
* It finds a pad to pull on, this is done by looking at the buffers
* to decide which one to use, and using the 'oldest' one first. It then calls
* gst_ogg_mux_process_best_pad() to process as much data as possible.
*
*
* If all the pads have received EOS, it flushes out all data by continually
* getting the best pad and calling gst_ogg_mux_process_best_pad() until they
* are all empty, and then sends EOS.
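
The queueing comments in gstoggmux.c above describe the best-pad rule only in prose; as a rough illustration, a self-contained sketch of that selection rule (simplified, hypothetical types; not the actual GstOggMux code) could look like this:

#include <stddef.h>
#include <stdint.h>

/* Simplified, hypothetical stand-ins for GstOggPadData and its queued buffer. */
typedef struct {
  int      has_buffer;   /* models pad->buffer != NULL */
  uint64_t timestamp;    /* timestamp of the queued buffer */
} OggPadSketch;

/* Return the index of the "best" pad: the usable pad whose queued buffer
 * carries the lowest timestamp, or -1 when no pad is usable.  This mirrors
 * the rule described in the comment above, where "best" means the buffer
 * marked with the lowest timestamp. */
static int
find_best_pad (const OggPadSketch *pads, size_t n_pads)
{
  int best = -1;
  size_t i;

  for (i = 0; i < n_pads; i++) {
    if (!pads[i].has_buffer)
      continue;
    if (best < 0 || pads[i].timestamp < pads[best].timestamp)
      best = (int) i;
  }
  return best;
}

With pad->buffer == NULL modelled as has_buffer == 0, the function returns -1 exactly in the "no pad was usable" case the comment mentions.
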
9 changes: 5 additions & 4 deletions ext/opus/gstopusdec.c
@@ -26,16 +26,17 @@

/**
* SECTION:element-opusdec
* @title: opusdec
* @see_also: opusenc, oggdemux
*
* This element decodes a OPUS stream to raw integer audio.
*
* <refsect2>
* <title>Example pipelines</title>
* ## Example pipelines
* |[
* gst-launch-1.0 -v filesrc location=opus.ogg ! oggdemux ! opusdec ! audioconvert ! audioresample ! alsasink
* ]| Decode an Ogg/Opus file. To create an Ogg/Opus file refer to the documentation of opusenc.
* </refsect2>
* ]|
* Decode an Ogg/Opus file. To create an Ogg/Opus file refer to the documentation of opusenc.
*
*/

#ifdef HAVE_CONFIG_H
9 changes: 5 additions & 4 deletions ext/opus/gstopusenc.c
@@ -25,16 +25,17 @@

/**
* SECTION:element-opusenc
* @title: opusenc
* @see_also: opusdec, oggmux
*
* This element encodes raw audio to OPUS.
*
* <refsect2>
* <title>Example pipelines</title>
* ## Example pipelines
* |[
* gst-launch-1.0 -v audiotestsrc wave=sine num-buffers=100 ! audioconvert ! opusenc ! oggmux ! filesink location=sine.ogg
* ]| Encode a test sine signal to Ogg/OPUS.
* </refsect2>
* ]|
* Encode a test sine signal to Ogg/OPUS.
*
*/

#ifdef HAVE_CONFIG_H
12 changes: 7 additions & 5 deletions ext/pango/gstclockoverlay.c
@@ -20,6 +20,7 @@

/**
* SECTION:element-clockoverlay
* @title: clockoverlay
* @see_also: #GstBaseTextOverlay, #GstTimeOverlay
*
* This element overlays the current clock time on top of a video
@@ -28,18 +29,19 @@
* time is displayed in the top left corner of the picture, with some
* padding to the left and to the top.
*
* <refsect2>
* <title>Example launch lines</title>
* ## Example launch lines
* |[
* gst-launch-1.0 -v videotestsrc ! clockoverlay ! autovideosink
* ]| Display the current wall clock time in the top left corner of the video picture
* ]|
* Display the current wall clock time in the top left corner of the video picture
* |[
* gst-launch-1.0 -v videotestsrc ! clockoverlay halignment=right valignment=bottom text="Edge City" shaded-background=true font-desc="Sans, 36" ! videoconvert ! autovideosink
* ]| Another pipeline that displays the current time with some leading
* ]|
* Another pipeline that displays the current time with some leading
* text in the bottom right corner of the video picture, with the background
* of the text being shaded in order to make it more legible on top of a
* bright video background.
* </refsect2>
*
*/

#ifdef HAVE_CONFIG_H
15 changes: 8 additions & 7 deletions ext/pango/gsttextoverlay.c
@@ -25,6 +25,7 @@

/**
* SECTION:element-textoverlay
* @title: textoverlay
* @see_also: #GstTextRender, #GstTextOverlay, #GstTimeOverlay, #GstSubParse
*
* This plugin renders text on top of a video stream. This can be either
@@ -37,18 +38,19 @@
* The text can contain newline characters and text wrapping is enabled by
* default.
*
* <refsect2>
* <title>Example launch lines</title>
* ## Example launch lines
* |[
* gst-launch-1.0 -v gst-launch-1.0 videotestsrc ! textoverlay text="Room A" valignment=top halignment=left font-desc="Sans, 72" ! autovideosink
* ]| Here is a simple pipeline that displays a static text in the top left
* ]|
* Here is a simple pipeline that displays a static text in the top left
* corner of the video picture
* |[
* gst-launch-1.0 -v filesrc location=subtitles.srt ! subparse ! txt. videotestsrc ! timeoverlay ! textoverlay name=txt shaded-background=yes ! autovideosink
* ]| Here is another pipeline that displays subtitles from an .srt subtitle
* ]|
* Here is another pipeline that displays subtitles from an .srt subtitle
* file, centered at the bottom of the picture and with a rectangular shading
* around the text in the background:
* <para>
*
* If you do not have such a subtitle file, create one looking like this
* in a text editor:
* |[
@@ -66,8 +68,7 @@
* Uh? What are you talking about?
* I don&apos;t understand (18-62s)
* ]|
* </para>
* </refsect2>
*
*/

#ifdef HAVE_CONFIG_H
8 changes: 4 additions & 4 deletions ext/pango/gsttextrender.c
@@ -22,21 +22,21 @@

/**
* SECTION:element-textrender
* @title: textrender
* @see_also: #GstTextOverlay
*
* This plugin renders text received on the text sink pad to a video
* buffer (retaining the alpha channel), so it can later be overlayed
* on top of video streams using other elements.
*
* The text can contain newline characters. (FIXME: What about text
* The text can contain newline characters. (FIXME: What about text
* wrapping? It does not make sense in this context)
*
* <refsect2>
* <title>Example launch lines</title>
* ## Example launch lines
* |[
* gst-launch-1.0 -v filesrc location=subtitles.srt ! subparse ! textrender ! videoconvert ! autovideosink
* ]|
* </refsect2>
*
*/

#ifdef HAVE_CONFIG_H
10 changes: 6 additions & 4 deletions ext/pango/gsttimeoverlay.c
@@ -20,6 +20,7 @@

/**
* SECTION:element-timeoverlay
* @title: timeoverlay
* @see_also: #GstBaseTextOverlay, #GstClockOverlay
*
* This element overlays the buffer time stamps of a video stream on
@@ -28,17 +29,18 @@
* time stamp is displayed in the top left corner of the picture, with some
* padding to the left and to the top.
*
* <refsect2>
* |[
* gst-launch-1.0 -v videotestsrc ! timeoverlay ! autovideosink
* ]| Display the time stamps in the top left corner of the video picture.
* ]|
* Display the time stamps in the top left corner of the video picture.
* |[
* gst-launch-1.0 -v videotestsrc ! timeoverlay halignment=right valignment=bottom text="Stream time:" shaded-background=true font-desc="Sans, 24" ! autovideosink
* ]| Another pipeline that displays the time stamps with some leading
* ]|
* Another pipeline that displays the time stamps with some leading
* text in the bottom right corner of the video picture, with the background
* of the text being shaded in order to make it more legible on top of a
* bright video background.
* </refsect2>
*
*/

#ifdef HAVE_CONFIG_H
9 changes: 5 additions & 4 deletions ext/theora/gsttheoradec.c
@@ -22,20 +22,21 @@

/**
* SECTION:element-theoradec
* @title: theoradec
* @see_also: theoraenc, oggdemux
*
* This element decodes theora streams into raw video
* <ulink url="http://www.theora.org/">Theora</ulink> is a royalty-free
* video codec maintained by the <ulink url="http://www.xiph.org/">Xiph.org
* Foundation</ulink>, based on the VP3 codec.
*
* <refsect2>
* <title>Example pipeline</title>
* ## Example pipeline
* |[
* gst-launch-1.0 -v filesrc location=videotestsrc.ogg ! oggdemux ! theoradec ! videoconvert ! videoscale ! autovideosink
* ]| This example pipeline will decode an ogg stream and decodes the theora video in it.
* ]|
* This example pipeline will decode an ogg stream and decodes the theora video in it.
* Refer to the theoraenc example to create the ogg file.
* </refsect2>
*
*/

#ifdef HAVE_CONFIG_H
9 changes: 5 additions & 4 deletions ext/theora/gsttheoraenc.c
@@ -22,6 +22,7 @@

/**
* SECTION:element-theoraenc
* @title: theoraenc
* @see_also: theoradec, oggmux
*
* This element encodes raw video into a Theora stream.
@@ -45,14 +46,14 @@
* A videorate element is often required in front of theoraenc, especially
* when transcoding and when putting Theora into the Ogg container.
*
* <refsect2>
* <title>Example pipeline</title>
* ## Example pipeline
* |[
* gst-launch-1.0 -v videotestsrc num-buffers=500 ! video/x-raw,width=1280,height=720 ! queue ! progressreport ! theoraenc ! oggmux ! filesink location=videotestsrc.ogg
* ]| This example pipeline will encode a test video source to theora muxed in an
* ]|
* This example pipeline will encode a test video source to theora muxed in an
* ogg container. Refer to the theoradec documentation to decode the create
* stream.
* </refsect2>
*
*/

#ifdef HAVE_CONFIG_H
