-rw-r--r--  ffmpeg.c             |  6
-rw-r--r--  libavcodec/dvdsub.c  |  4
-rw-r--r--  libavcodec/snow.c    | 12
-rw-r--r--  libavcodec/utils.c   |  4
-rw-r--r--  libavcodec/vc9.c     | 12
-rw-r--r--  libavcodec/vorbis.c  |  2
-rw-r--r--  output_example.c     | 22
-rw-r--r--  vhook/ppm.c          | 12
8 files changed, 37 insertions, 37 deletions
diff --git a/ffmpeg.c b/ffmpeg.c
index cb58364816..f3f2817030 100644
--- a/ffmpeg.c
+++ b/ffmpeg.c
@@ -3790,7 +3790,7 @@ static void show_formats(void)
exit(1);
}
-void parse_matrix_coeffs(uint16_t *dest, const char *str)
+static void parse_matrix_coeffs(uint16_t *dest, const char *str)
{
int i;
const char *p = str;
@@ -3807,13 +3807,13 @@ void parse_matrix_coeffs(uint16_t *dest, const char *str)
}
}
-void opt_inter_matrix(const char *arg)
+static void opt_inter_matrix(const char *arg)
{
inter_matrix = av_mallocz(sizeof(uint16_t) * 64);
parse_matrix_coeffs(inter_matrix, arg);
}
-void opt_intra_matrix(const char *arg)
+static void opt_intra_matrix(const char *arg)
{
intra_matrix = av_mallocz(sizeof(uint16_t) * 64);
parse_matrix_coeffs(intra_matrix, arg);
diff --git a/libavcodec/dvdsub.c b/libavcodec/dvdsub.c
index bf05c1a8c8..7a07587133 100644
--- a/libavcodec/dvdsub.c
+++ b/libavcodec/dvdsub.c
@@ -28,12 +28,12 @@ static int dvdsub_init_decoder(AVCodecContext *avctx)
return 0;
}
-uint16_t getbe16(const uint8_t *p)
+static uint16_t getbe16(const uint8_t *p)
{
return (p[0] << 8) | p[1];
}
-int get_nibble(const uint8_t *buf, int nibble_offset)
+static int get_nibble(const uint8_t *buf, int nibble_offset)
{
return (buf[nibble_offset >> 1] >> ((1 - (nibble_offset & 1)) << 2)) & 0xf;
}
diff --git a/libavcodec/snow.c b/libavcodec/snow.c
index d9f58e2d02..1edd387d67 100644
--- a/libavcodec/snow.c
+++ b/libavcodec/snow.c
@@ -1576,7 +1576,7 @@ static void spatial_compose97i(DWTELEM *buffer, int width, int height, int strid
spatial_compose97i_dy(&cs, buffer, width, height, stride);
}
-void ff_spatial_idwt_buffered_init(dwt_compose_t *cs, slice_buffer * sb, int width, int height, int stride_line, int type, int decomposition_count){
+static void ff_spatial_idwt_buffered_init(dwt_compose_t *cs, slice_buffer * sb, int width, int height, int stride_line, int type, int decomposition_count){
int level;
for(level=decomposition_count-1; level>=0; level--){
switch(type){
@@ -1589,7 +1589,7 @@ void ff_spatial_idwt_buffered_init(dwt_compose_t *cs, slice_buffer * sb, int wid
}
}
-void ff_spatial_idwt_init(dwt_compose_t *cs, DWTELEM *buffer, int width, int height, int stride, int type, int decomposition_count){
+static void ff_spatial_idwt_init(dwt_compose_t *cs, DWTELEM *buffer, int width, int height, int stride, int type, int decomposition_count){
int level;
for(level=decomposition_count-1; level>=0; level--){
switch(type){
@@ -1601,7 +1601,7 @@ void ff_spatial_idwt_init(dwt_compose_t *cs, DWTELEM *buffer, int width, int hei
}
}
-void ff_spatial_idwt_slice(dwt_compose_t *cs, DWTELEM *buffer, int width, int height, int stride, int type, int decomposition_count, int y){
+static void ff_spatial_idwt_slice(dwt_compose_t *cs, DWTELEM *buffer, int width, int height, int stride, int type, int decomposition_count, int y){
const int support = type==1 ? 3 : 5;
int level;
if(type==2) return;
@@ -1619,7 +1619,7 @@ void ff_spatial_idwt_slice(dwt_compose_t *cs, DWTELEM *buffer, int width, int he
}
}
-void ff_spatial_idwt_buffered_slice(dwt_compose_t *cs, slice_buffer * slice_buf, int width, int height, int stride_line, int type, int decomposition_count, int y){
+static void ff_spatial_idwt_buffered_slice(dwt_compose_t *cs, slice_buffer * slice_buf, int width, int height, int stride_line, int type, int decomposition_count, int y){
const int support = type==1 ? 3 : 5;
int level;
if(type==2) return;
@@ -1637,7 +1637,7 @@ void ff_spatial_idwt_buffered_slice(dwt_compose_t *cs, slice_buffer * slice_buf,
}
}
-void ff_spatial_idwt(DWTELEM *buffer, int width, int height, int stride, int type, int decomposition_count){
+static void ff_spatial_idwt(DWTELEM *buffer, int width, int height, int stride, int type, int decomposition_count){
if(type==2){
int level;
for(level=decomposition_count-1; level>=0; level--)
@@ -3776,7 +3776,7 @@ static int decode_header(SnowContext *s){
return 0;
}
-static void init_qexp(){
+static void init_qexp(void){
int i;
double v=128;
diff --git a/libavcodec/utils.c b/libavcodec/utils.c
index 3e450cbeff..04d5081c7d 100644
--- a/libavcodec/utils.c
+++ b/libavcodec/utils.c
@@ -149,9 +149,9 @@ void av_free_static(void)
* Call av_free_static automatically before it's too late
*/
-static void do_free() __attribute__ ((destructor));
+static void do_free(void) __attribute__ ((destructor));
-static void do_free()
+static void do_free(void)
{
av_free_static();
}
diff --git a/libavcodec/vc9.c b/libavcodec/vc9.c
index ea0ebae0b7..465edda103 100644
--- a/libavcodec/vc9.c
+++ b/libavcodec/vc9.c
@@ -929,7 +929,7 @@ static int advanced_entry_point_process(AVCodecContext *avctx, GetBitContext *gb
* @todo TODO: Take into account stride
* @todo TODO: Allow use of external buffers ?
*/
-int alloc_bitplane(BitPlane *bp, int width, int height)
+static int alloc_bitplane(BitPlane *bp, int width, int height)
{
if (!bp || bp->width<0 || bp->height<0) return -1;
bp->data = (uint8_t*)av_malloc(width*height);
@@ -942,7 +942,7 @@ int alloc_bitplane(BitPlane *bp, int width, int height)
/** Free the bitplane's buffer
* @param bp Bitplane which buffer is to free
*/
-void free_bitplane(BitPlane *bp)
+static void free_bitplane(BitPlane *bp)
{
bp->width = bp->stride = bp->height = 0;
if (bp->data) av_freep(&bp->data);
@@ -1832,7 +1832,7 @@ static inline int vc9_pred_dc(MpegEncContext *s, int n,
* @todo TODO: Process the blocks
* @todo TODO: Use M$ MPEG-4 cbp prediction
*/
-int vc9_decode_block(VC9Context *v, DCTELEM block[64], int n, int coded, int mquant)
+static int vc9_decode_block(VC9Context *v, DCTELEM block[64], int n, int coded, int mquant)
{
GetBitContext *gb = &v->s.gb;
MpegEncContext *s = &v->s;
@@ -1979,7 +1979,7 @@ static inline int vc9_coded_block_pred(MpegEncContext * s, int n, uint8_t **code
/** Decode one I-frame MB (in Simple/Main profile)
* @todo TODO: Extend to AP
*/
-int vc9_decode_i_mb(VC9Context *v, DCTELEM block[6][64])
+static int vc9_decode_i_mb(VC9Context *v, DCTELEM block[6][64])
{
int i, cbp, val;
uint8_t *coded_val;
@@ -2013,7 +2013,7 @@ int vc9_decode_i_mb(VC9Context *v, DCTELEM block[6][64])
* @todo TODO: Extend to AP
* @fixme FIXME: DC value for inter blocks not set
*/
-int vc9_decode_p_mb(VC9Context *v, DCTELEM block[6][64])
+static int vc9_decode_p_mb(VC9Context *v, DCTELEM block[6][64])
{
MpegEncContext *s = &v->s;
GetBitContext *gb = &s->gb;
@@ -2160,7 +2160,7 @@ int vc9_decode_p_mb(VC9Context *v, DCTELEM block[6][64])
* @warning XXX: Used for decoding BI MBs
* @fixme FIXME: DC value for inter blocks not set
*/
-int vc9_decode_b_mb(VC9Context *v, DCTELEM block[6][64])
+static int vc9_decode_b_mb(VC9Context *v, DCTELEM block[6][64])
{
MpegEncContext *s = &v->s;
GetBitContext *gb = &v->s.gb;
diff --git a/libavcodec/vorbis.c b/libavcodec/vorbis.c
index 72b58ad78b..01f1301e51 100644
--- a/libavcodec/vorbis.c
+++ b/libavcodec/vorbis.c
@@ -46,7 +46,7 @@
/**
* reads 0-32 bits when using the ALT_BITSTREAM_READER_LE bitstream reader
*/
-unsigned int get_bits_long_le(GetBitContext *s, int n){
+static unsigned int get_bits_long_le(GetBitContext *s, int n){
if(n<=17) return get_bits(s, n);
else{
int ret= get_bits(s, 16);
diff --git a/output_example.c b/output_example.c
index d1cb754a0b..636ad6ee40 100644
--- a/output_example.c
+++ b/output_example.c
@@ -51,7 +51,7 @@ int audio_input_frame_size;
/*
* add an audio output stream
*/
-AVStream *add_audio_stream(AVFormatContext *oc, int codec_id)
+static AVStream *add_audio_stream(AVFormatContext *oc, int codec_id)
{
AVCodecContext *c;
AVStream *st;
@@ -73,7 +73,7 @@ AVStream *add_audio_stream(AVFormatContext *oc, int codec_id)
return st;
}
-void open_audio(AVFormatContext *oc, AVStream *st)
+static void open_audio(AVFormatContext *oc, AVStream *st)
{
AVCodecContext *c;
AVCodec *codec;
@@ -124,7 +124,7 @@ void open_audio(AVFormatContext *oc, AVStream *st)
/* prepare a 16 bit dummy audio frame of 'frame_size' samples and
'nb_channels' channels */
-void get_audio_frame(int16_t *samples, int frame_size, int nb_channels)
+static void get_audio_frame(int16_t *samples, int frame_size, int nb_channels)
{
int j, i, v;
int16_t *q;
@@ -139,7 +139,7 @@ void get_audio_frame(int16_t *samples, int frame_size, int nb_channels)
}
}
-void write_audio_frame(AVFormatContext *oc, AVStream *st)
+static void write_audio_frame(AVFormatContext *oc, AVStream *st)
{
AVCodecContext *c;
AVPacket pkt;
@@ -163,7 +163,7 @@ void write_audio_frame(AVFormatContext *oc, AVStream *st)
}
}
-void close_audio(AVFormatContext *oc, AVStream *st)
+static void close_audio(AVFormatContext *oc, AVStream *st)
{
avcodec_close(st->codec);
@@ -179,7 +179,7 @@ uint8_t *video_outbuf;
int frame_count, video_outbuf_size;
/* add a video output stream */
-AVStream *add_video_stream(AVFormatContext *oc, int codec_id)
+static AVStream *add_video_stream(AVFormatContext *oc, int codec_id)
{
AVCodecContext *c;
AVStream *st;
@@ -224,7 +224,7 @@ AVStream *add_video_stream(AVFormatContext *oc, int codec_id)
return st;
}
-AVFrame *alloc_picture(int pix_fmt, int width, int height)
+static AVFrame *alloc_picture(int pix_fmt, int width, int height)
{
AVFrame *picture;
uint8_t *picture_buf;
@@ -244,7 +244,7 @@ AVFrame *alloc_picture(int pix_fmt, int width, int height)
return picture;
}
-void open_video(AVFormatContext *oc, AVStream *st)
+static void open_video(AVFormatContext *oc, AVStream *st)
{
AVCodec *codec;
AVCodecContext *c;
@@ -293,7 +293,7 @@ void open_video(AVFormatContext *oc, AVStream *st)
}
/* prepare a dummy image */
-void fill_yuv_image(AVFrame *pict, int frame_index, int width, int height)
+static void fill_yuv_image(AVFrame *pict, int frame_index, int width, int height)
{
int x, y, i;
@@ -315,7 +315,7 @@ void fill_yuv_image(AVFrame *pict, int frame_index, int width, int height)
}
}
-void write_video_frame(AVFormatContext *oc, AVStream *st)
+static void write_video_frame(AVFormatContext *oc, AVStream *st)
{
int out_size, ret;
AVCodecContext *c;
@@ -380,7 +380,7 @@ void write_video_frame(AVFormatContext *oc, AVStream *st)
frame_count++;
}
-void close_video(AVFormatContext *oc, AVStream *st)
+static void close_video(AVFormatContext *oc, AVStream *st)
{
avcodec_close(st->codec);
av_free(picture->data[0]);
diff --git a/vhook/ppm.c b/vhook/ppm.c
index a9de8c9d4f..ef5fd837a1 100644
--- a/vhook/ppm.c
+++ b/vhook/ppm.c
@@ -40,7 +40,7 @@ rwpipe;
/** Create a bidirectional pipe for the given command.
*/
-rwpipe *rwpipe_open( int argc, char *argv[] )
+static rwpipe *rwpipe_open( int argc, char *argv[] )
{
rwpipe *this = av_mallocz( sizeof( rwpipe ) );
@@ -94,7 +94,7 @@ rwpipe *rwpipe_open( int argc, char *argv[] )
/** Read data from the pipe.
*/
-FILE *rwpipe_reader( rwpipe *this )
+static FILE *rwpipe_reader( rwpipe *this )
{
if ( this != NULL )
return this->reader;
@@ -105,7 +105,7 @@ FILE *rwpipe_reader( rwpipe *this )
/** Write data to the pipe.
*/
-FILE *rwpipe_writer( rwpipe *this )
+static FILE *rwpipe_writer( rwpipe *this )
{
if ( this != NULL )
return this->writer;
@@ -116,7 +116,7 @@ FILE *rwpipe_writer( rwpipe *this )
/* Read a number from the pipe - assumes PNM style headers.
*/
-int rwpipe_read_number( rwpipe *rw )
+static int rwpipe_read_number( rwpipe *rw )
{
int value = 0;
int c = 0;
@@ -147,7 +147,7 @@ int rwpipe_read_number( rwpipe *rw )
/** Read a PPM P6 header.
*/
-int rwpipe_read_ppm_header( rwpipe *rw, int *width, int *height )
+static int rwpipe_read_ppm_header( rwpipe *rw, int *width, int *height )
{
char line[ 3 ];
FILE *in = rwpipe_reader( rw );
@@ -167,7 +167,7 @@ int rwpipe_read_ppm_header( rwpipe *rw, int *width, int *height )
/** Close the pipe and process.
*/
-void rwpipe_close( rwpipe *this )
+static void rwpipe_close( rwpipe *this )
{
if ( this != NULL )
{