project (stringclasses, 2 values) | commit_id (stringlengths, 40-40) | target (int64, 0-1) | func (stringlengths, 26-142k) | idx (int64, 0-27.3k)
---|---|---|---|---|
FFmpeg | c11d3634b07b4aa71f75478aa1bcb63b0c22e030 | 1 | static void srt_to_ass(AVCodecContext *avctx, AVBPrint *dst,
const char *in, int x1, int y1, int x2, int y2)
{
if (x1 >= 0 && y1 >= 0) {
/* XXX: here we rescale coordinates assuming they are in DVD resolution
* (720x480) since we don't have anything better */
if (x2 >= 0 && y2 >= 0 && (x2 != x1 || y2 != y1) && x2 >= x1 && y2 >= y1) {
/* text rectangle defined, write the text at the center of the rectangle */
const int cx = x1 + (x2 - x1)/2;
const int cy = y1 + (y2 - y1)/2;
const int scaled_x = cx * ASS_DEFAULT_PLAYRESX / 720;
const int scaled_y = cy * ASS_DEFAULT_PLAYRESY / 480;
av_bprintf(dst, "{\\an5}{\\pos(%d,%d)}", scaled_x, scaled_y);
} else {
/* only the top left corner, assume the text starts in that corner */
const int scaled_x = x1 * ASS_DEFAULT_PLAYRESX / 720;
const int scaled_y = y1 * ASS_DEFAULT_PLAYRESY / 480;
av_bprintf(dst, "{\\an1}{\\pos(%d,%d)}", scaled_x, scaled_y);
}
}
ff_htmlmarkup_to_ass(avctx, dst, in);
}
| 5,063 |
qemu | f1d3b99154138741161fc52f5a8c373bf71613c6 | 1 | static void mmio_basic(void)
{
QVirtioMMIODevice *dev;
QVirtQueue *vq;
QGuestAllocator *alloc;
int n_size = TEST_IMAGE_SIZE / 2;
uint64_t capacity;
arm_test_start();
dev = qvirtio_mmio_init_device(MMIO_DEV_BASE_ADDR, MMIO_PAGE_SIZE);
g_assert(dev != NULL);
g_assert_cmphex(dev->vdev.device_type, ==, VIRTIO_ID_BLOCK);
qvirtio_reset(&qvirtio_mmio, &dev->vdev);
qvirtio_set_acknowledge(&qvirtio_mmio, &dev->vdev);
qvirtio_set_driver(&qvirtio_mmio, &dev->vdev);
alloc = generic_alloc_init(MMIO_RAM_ADDR, MMIO_RAM_SIZE, MMIO_PAGE_SIZE);
vq = qvirtqueue_setup(&qvirtio_mmio, &dev->vdev, alloc, 0);
test_basic(&qvirtio_mmio, &dev->vdev, alloc, vq,
QVIRTIO_MMIO_DEVICE_SPECIFIC);
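/* Resizing the backing drive via QMP is expected to show up in the
* device's capacity field (in 512-byte sectors); the test waits for an
* interrupt before re-reading the config space. */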
qmp("{ 'execute': 'block_resize', 'arguments': { 'device': 'drive0', "
" 'size': %d } }", n_size);
qvirtio_wait_queue_isr(&qvirtio_mmio, &dev->vdev, vq,
QVIRTIO_BLK_TIMEOUT_US);
capacity = qvirtio_config_readq(&qvirtio_mmio, &dev->vdev,
QVIRTIO_MMIO_DEVICE_SPECIFIC);
g_assert_cmpint(capacity, ==, n_size / 512);
/* End test */
guest_free(alloc, vq->desc);
generic_alloc_uninit(alloc);
g_free(dev);
test_end();
}
| 5,064 |
FFmpeg | 9e7b62f0fb7462a902330fcc82cf596388f0187b | 0 | int ff_twinvq_decode_frame(AVCodecContext *avctx, void *data,
int *got_frame_ptr, AVPacket *avpkt)
{
AVFrame *frame = data;
const uint8_t *buf = avpkt->data;
int buf_size = avpkt->size;
TwinVQContext *tctx = avctx->priv_data;
const TwinVQModeTab *mtab = tctx->mtab;
float **out = NULL;
int ret;
/* get output buffer */
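/* Note: an output frame is allocated only once at least two packets have
* been seen; the first two packets appear to be consumed just to prime
* the decoder state (see the discarded_packets handling below). */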
if (tctx->discarded_packets >= 2) {
frame->nb_samples = mtab->size;
if ((ret = ff_get_buffer(avctx, frame, 0)) < 0) {
av_log(avctx, AV_LOG_ERROR, "get_buffer() failed\n");
return ret;
}
out = (float **)frame->extended_data;
}
if ((ret = tctx->read_bitstream(avctx, tctx, buf, buf_size)) < 0)
return ret;
read_and_decode_spectrum(tctx, tctx->spectrum, tctx->bits.ftype);
imdct_output(tctx, tctx->bits.ftype, tctx->bits.window_type, out);
FFSWAP(float *, tctx->curr_frame, tctx->prev_frame);
if (tctx->discarded_packets < 2) {
tctx->discarded_packets++;
*got_frame_ptr = 0;
return buf_size;
}
*got_frame_ptr = 1;
return buf_size;
}
| 5,065 |
FFmpeg | d4f7d8386693beb987382ece8bb7499955620388 | 0 | static int split_field_ref_list(Picture *dest, int dest_len,
Picture *src, int src_len,
int parity, int long_i){
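/* This looks like it splits the first long_i entries and the remaining
* entries of the reference list separately by field parity, concatenates
* the two halves into dest, and returns the total number written. */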
int i = split_field_half_ref_list(dest, dest_len, src, long_i, parity);
dest += i;
dest_len -= i;
i += split_field_half_ref_list(dest, dest_len, src + long_i,
src_len - long_i, parity);
return i;
}
| 5,067 |
FFmpeg | 8bb7d97be0cda3c944956c79c50d0cf6f1efb7b9 | 0 | static int fourxm_read_packet(AVFormatContext *s,
AVPacket *pkt)
{
FourxmDemuxContext *fourxm = s->priv_data;
ByteIOContext *pb = s->pb;
unsigned int fourcc_tag;
unsigned int size, out_size;
int ret = 0;
unsigned int track_number;
int packet_read = 0;
unsigned char header[8];
int audio_frame_count;
while (!packet_read) {
if ((ret = get_buffer(s->pb, header, 8)) < 0)
return ret;
fourcc_tag = AV_RL32(&header[0]);
size = AV_RL32(&header[4]);
if (url_feof(pb))
return AVERROR(EIO);
switch (fourcc_tag) {
case LIST_TAG:
/* this is a good time to bump the video pts */
fourxm->video_pts ++;
/* skip the LIST-* tag and move on to the next fourcc */
get_le32(pb);
break;
case ifrm_TAG:
case pfrm_TAG:
case cfrm_TAG:
case ifr2_TAG:
case pfr2_TAG:
case cfr2_TAG:
/* allocate 8 more bytes than 'size' to account for fourcc
* and size */
if (size + 8 < size || av_new_packet(pkt, size + 8))
return AVERROR(EIO);
pkt->stream_index = fourxm->video_stream_index;
pkt->pts = fourxm->video_pts;
pkt->pos = url_ftell(s->pb);
memcpy(pkt->data, header, 8);
ret = get_buffer(s->pb, &pkt->data[8], size);
if (ret < 0){
av_free_packet(pkt);
}else
packet_read = 1;
break;
case snd__TAG:
track_number = get_le32(pb);
out_size= get_le32(pb);
size-=8;
if (track_number < fourxm->track_count) {
ret= av_get_packet(s->pb, pkt, size);
if(ret<0)
return AVERROR(EIO);
pkt->stream_index =
fourxm->tracks[track_number].stream_index;
pkt->pts = fourxm->tracks[track_number].audio_pts;
packet_read = 1;
/* pts accounting */
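/* The sample count is derived from the payload size: for ADPCM a 2-byte
* per-channel preamble is subtracted and each remaining byte encodes two
* samples; for PCM the byte count is divided by the channel count and by
* the bytes per sample (bits / 8). */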
audio_frame_count = size;
if (fourxm->tracks[track_number].adpcm)
audio_frame_count -=
2 * (fourxm->tracks[track_number].channels);
audio_frame_count /=
fourxm->tracks[track_number].channels;
if (fourxm->tracks[track_number].adpcm){
audio_frame_count *= 2;
}else
audio_frame_count /=
(fourxm->tracks[track_number].bits / 8);
fourxm->tracks[track_number].audio_pts += audio_frame_count;
} else {
url_fseek(pb, size, SEEK_CUR);
}
break;
default:
url_fseek(pb, size, SEEK_CUR);
break;
}
}
return ret;
}
| 5,068 |
FFmpeg | 5a840f636491fa52a003fb8f674f6db39d5edd66 | 0 | SwsFunc ff_yuv2rgb_init_mmx(SwsContext *c)
{
int cpu_flags = av_get_cpu_flags();
if (c->srcFormat != PIX_FMT_YUV420P &&
c->srcFormat != PIX_FMT_YUVA420P)
return NULL;
#if HAVE_MMX2
if (cpu_flags & AV_CPU_FLAG_MMX2) {
switch (c->dstFormat) {
case PIX_FMT_RGB24: return yuv420_rgb24_MMX2;
case PIX_FMT_BGR24: return yuv420_bgr24_MMX2;
}
}
#endif
if (cpu_flags & AV_CPU_FLAG_MMX) {
switch (c->dstFormat) {
case PIX_FMT_RGB32:
if (c->srcFormat == PIX_FMT_YUVA420P) {
#if HAVE_7REGS && CONFIG_SWSCALE_ALPHA
return yuva420_rgb32_MMX;
#endif
break;
} else return yuv420_rgb32_MMX;
case PIX_FMT_BGR32:
if (c->srcFormat == PIX_FMT_YUVA420P) {
#if HAVE_7REGS && CONFIG_SWSCALE_ALPHA
return yuva420_bgr32_MMX;
#endif
break;
} else return yuv420_bgr32_MMX;
case PIX_FMT_RGB24: return yuv420_rgb24_MMX;
case PIX_FMT_BGR24: return yuv420_bgr24_MMX;
case PIX_FMT_RGB565: return yuv420_rgb16_MMX;
case PIX_FMT_RGB555: return yuv420_rgb15_MMX;
}
}
return NULL;
}
| 5,069 |
qemu | 37064a8b6f9075e18b05bfc6d5264b138a224713 | 1 | void cpsr_write(CPUARMState *env, uint32_t val, uint32_t mask)
{
if (mask & CPSR_NZCV) {
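/* QEMU stores the ARM flags in separate fields: Z is kept inverted
* (ZF == 0 means the Z flag is set), N and V live in bit 31 of NF/VF,
* and C is a plain 0/1 value, which explains the shifts below. */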
env->ZF = (~val) & CPSR_Z;
env->NF = val;
env->CF = (val >> 29) & 1;
env->VF = (val << 3) & 0x80000000;
}
if (mask & CPSR_Q)
env->QF = ((val & CPSR_Q) != 0);
if (mask & CPSR_T)
env->thumb = ((val & CPSR_T) != 0);
if (mask & CPSR_IT_0_1) {
env->condexec_bits &= ~3;
env->condexec_bits |= (val >> 25) & 3;
}
if (mask & CPSR_IT_2_7) {
env->condexec_bits &= 3;
env->condexec_bits |= (val >> 8) & 0xfc;
}
if (mask & CPSR_GE) {
env->GE = (val >> 16) & 0xf;
}
if ((env->uncached_cpsr ^ val) & mask & CPSR_M) {
switch_mode(env, val & CPSR_M);
}
mask &= ~CACHED_CPSR_BITS;
env->uncached_cpsr = (env->uncached_cpsr & ~mask) | (val & mask);
}
| 5,070 |
FFmpeg | 06a0d5ef5ce3fd9236a9fa0ff0f37ea4107b747d | 0 | static av_always_inline av_flatten void FUNCC(h264_loop_filter_luma)(uint8_t *p_pix, int xstride, int ystride, int inner_iters, int alpha, int beta, int8_t *tc0)
{
pixel *pix = (pixel*)p_pix;
int i, d;
xstride >>= sizeof(pixel)-1;
ystride >>= sizeof(pixel)-1;
alpha <<= BIT_DEPTH - 8;
beta <<= BIT_DEPTH - 8;
for( i = 0; i < 4; i++ ) {
const int tc_orig = tc0[i] << (BIT_DEPTH - 8);
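/* A negative tc0 value signals that deblocking is disabled for this
* 4-pixel edge segment, so it is skipped entirely. */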
if( tc_orig < 0 ) {
pix += inner_iters*ystride;
continue;
}
for( d = 0; d < inner_iters; d++ ) {
const int p0 = pix[-1*xstride];
const int p1 = pix[-2*xstride];
const int p2 = pix[-3*xstride];
const int q0 = pix[0];
const int q1 = pix[1*xstride];
const int q2 = pix[2*xstride];
if( FFABS( p0 - q0 ) < alpha &&
FFABS( p1 - p0 ) < beta &&
FFABS( q1 - q0 ) < beta ) {
int tc = tc_orig;
int i_delta;
if( FFABS( p2 - p0 ) < beta ) {
if(tc_orig)
pix[-2*xstride] = p1 + av_clip( (( p2 + ( ( p0 + q0 + 1 ) >> 1 ) ) >> 1) - p1, -tc_orig, tc_orig );
tc++;
}
if( FFABS( q2 - q0 ) < beta ) {
if(tc_orig)
pix[ xstride] = q1 + av_clip( (( q2 + ( ( p0 + q0 + 1 ) >> 1 ) ) >> 1) - q1, -tc_orig, tc_orig );
tc++;
}
i_delta = av_clip( (((q0 - p0 ) << 2) + (p1 - q1) + 4) >> 3, -tc, tc );
pix[-xstride] = av_clip_pixel( p0 + i_delta ); /* p0' */
pix[0] = av_clip_pixel( q0 - i_delta ); /* q0' */
}
pix += ystride;
}
}
}
| 5,073 |
qemu | f3c32fce3688fe1f13ceb0777faa1fc19d66d1fc | 1 | static int nbd_errno_to_system_errno(int err)
{
switch (err) {
case NBD_SUCCESS:
return 0;
case NBD_EPERM:
return EPERM;
case NBD_EIO:
return EIO;
case NBD_ENOMEM:
return ENOMEM;
case NBD_ENOSPC:
return ENOSPC;
case NBD_EINVAL:
default:
return EINVAL;
}
}
| 5,074 |
qemu | 6b4495401bdf442457b713b7e3994b465c55af35 | 1 | pcie_cap_v1_fill(uint8_t *exp_cap, uint8_t port, uint8_t type, uint8_t version)
{
/* capability register
interrupt message number defaults to 0 */
pci_set_word(exp_cap + PCI_EXP_FLAGS,
((type << PCI_EXP_FLAGS_TYPE_SHIFT) & PCI_EXP_FLAGS_TYPE) |
version);
/* device capability register
* table 7-12:
* Role-Based Error Reporting bit must be set by all
* Functions conforming to the ECN, PCI Express Base
* Specification, Revision 1.1., or subsequent PCI Express Base
* Specification revisions.
*/
pci_set_long(exp_cap + PCI_EXP_DEVCAP, PCI_EXP_DEVCAP_RBER);
pci_set_long(exp_cap + PCI_EXP_LNKCAP,
(port << PCI_EXP_LNKCAP_PN_SHIFT) |
PCI_EXP_LNKCAP_ASPMS_0S |
PCI_EXP_LNK_MLW_1 |
PCI_EXP_LNK_LS_25);
pci_set_word(exp_cap + PCI_EXP_LNKSTA,
PCI_EXP_LNK_MLW_1 | PCI_EXP_LNK_LS_25 |PCI_EXP_LNKSTA_DLLLA);
}
| 5,075 |
FFmpeg | fd5293d216316752fd34dcb29051e748f076e5fb | 1 | static int start_frame(AVFilterLink *inlink, AVFilterBufferRef *picref)
{
AVFilterContext *ctx = inlink->dst;
TInterlaceContext *tinterlace = ctx->priv;
avfilter_unref_buffer(tinterlace->cur);
tinterlace->cur = tinterlace->next;
tinterlace->next = picref;
return 0;
} | 5,076 |
FFmpeg | bbcc09518e0d1efc189a43ff0120c1a31f51c802 | 0 | static void write_odml_master(AVFormatContext *s, int stream_index)
{
AVIOContext *pb = s->pb;
AVStream *st = s->streams[stream_index];
AVCodecContext *enc = st->codec;
AVIStream *avist = st->priv_data;
unsigned char tag[5];
int j;
/* Starting to lay out AVI OpenDML master index.
* We want to make it JUNK entry for now, since we'd
* like to get away without making AVI an OpenDML one
* for compatibility reasons. */
avist->indexes.entry = avist->indexes.ents_allocated = 0;
avist->indexes.indx_start = ff_start_tag(pb, "JUNK");
avio_wl16(pb, 4); /* wLongsPerEntry */
avio_w8(pb, 0); /* bIndexSubType (0 == frame index) */
avio_w8(pb, 0); /* bIndexType (0 == AVI_INDEX_OF_INDEXES) */
avio_wl32(pb, 0); /* nEntriesInUse (will fill out later on) */
ffio_wfourcc(pb, avi_stream2fourcc(tag, stream_index, enc->codec_type));
/* dwChunkId */
avio_wl64(pb, 0); /* dwReserved[3] */
avio_wl32(pb, 0); /* Must be 0. */
for (j = 0; j < AVI_MASTER_INDEX_SIZE * 2; j++)
avio_wl64(pb, 0);
ff_end_tag(pb, avist->indexes.indx_start);
}
| 5,077 |
FFmpeg | 85e7386ae0d33ede4c575d4df4c1faae6c906338 | 0 | static void cook_imlt(COOKContext *q, float* inbuffer, float* outbuffer)
{
int i;
q->mdct_ctx.fft.imdct_calc(&q->mdct_ctx, outbuffer, inbuffer, q->mdct_tmp);
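/* The loop below appears to window the second half of the IMDCT output
* into the first half of the buffer, and to write the saved first half,
* negated and windowed with the reversed window, into the second half. */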
for(i = 0; i < q->samples_per_channel; i++){
float tmp = outbuffer[i];
outbuffer[i] = q->mlt_window[i] * outbuffer[q->samples_per_channel + i];
outbuffer[q->samples_per_channel + i] = q->mlt_window[q->samples_per_channel - 1 - i] * -tmp;
}
}
| 5,078 |
FFmpeg | f1f298cd32b18bb910ff045df327ccb139628db7 | 0 | static int mp3_write_trailer(struct AVFormatContext *s)
{
uint8_t buf[ID3v1_TAG_SIZE];
MP3Context *mp3 = s->priv_data;
/* write the id3v1 tag */
if (id3v1_create_tag(s, buf) > 0) {
avio_write(s->pb, buf, ID3v1_TAG_SIZE);
}
/* write number of frames */
if (mp3 && mp3->nb_frames_offset) {
avio_seek(s->pb, mp3->nb_frames_offset, SEEK_SET);
avio_wb32(s->pb, s->streams[0]->nb_frames);
avio_seek(s->pb, 0, SEEK_END);
}
avio_flush(s->pb);
return 0;
}
| 5,079 |
qemu | 0fdddf80a88ac2efe068990d1878f472bb6b95d9 | 0 | int64_t qemu_get_clock(QEMUClock *clock)
{
switch(clock->type) {
case QEMU_TIMER_REALTIME:
return get_clock() / 1000000;
default:
case QEMU_TIMER_VIRTUAL:
if (use_icount) {
return cpu_get_icount();
} else {
return cpu_get_clock();
}
}
}
| 5,080 |
qemu | 7d553f27fce284805d7f94603932045ee3bbb979 | 0 | static int usb_qdev_exit(DeviceState *qdev)
{
USBDevice *dev = USB_DEVICE(qdev);
if (dev->attached) {
usb_device_detach(dev);
}
usb_device_handle_destroy(dev);
if (dev->port) {
usb_release_port(dev);
}
return 0;
}
| 5,082 |
qemu | 77e4743c94d2a926623e280913e05ad6c840791e | 0 | void scsi_req_build_sense(SCSIRequest *req, SCSISense sense)
{
trace_scsi_req_build_sense(req->dev->id, req->lun, req->tag,
sense.key, sense.asc, sense.ascq);
memset(req->sense, 0, 18);
req->sense[0] = 0xf0;
req->sense[2] = sense.key;
req->sense[7] = 10;
req->sense[12] = sense.asc;
req->sense[13] = sense.ascq;
req->sense_len = 18;
}
| 5,083 |
FFmpeg | 14c8ee00ffd9d45e6e0c6f11a957ce7e56f7eb3a | 0 | static int vp3_decode_frame(AVCodecContext *avctx,
void *data, int *got_frame,
AVPacket *avpkt)
{
const uint8_t *buf = avpkt->data;
int buf_size = avpkt->size;
Vp3DecodeContext *s = avctx->priv_data;
GetBitContext gb;
int i;
int ret;
init_get_bits(&gb, buf, buf_size * 8);
#if CONFIG_THEORA_DECODER
if (s->theora && get_bits1(&gb))
{
int type = get_bits(&gb, 7);
skip_bits_long(&gb, 6*8); /* "theora" */
if (type == 0) {
if (s->avctx->active_thread_type&FF_THREAD_FRAME) {
av_log(avctx, AV_LOG_ERROR, "midstream reconfiguration with multithreading is unsupported, try -threads 1\n");
return AVERROR_PATCHWELCOME;
}
vp3_decode_end(avctx);
ret = theora_decode_header(avctx, &gb);
if (ret < 0) {
vp3_decode_end(avctx);
} else
ret = vp3_decode_init(avctx);
return ret;
} else if (type == 2) {
ret = theora_decode_tables(avctx, &gb);
if (ret < 0) {
vp3_decode_end(avctx);
} else
ret = vp3_decode_init(avctx);
return ret;
}
av_log(avctx, AV_LOG_ERROR, "Header packet passed to frame decoder, skipping\n");
return -1;
}
#endif
s->keyframe = !get_bits1(&gb);
if (!s->all_fragments) {
av_log(avctx, AV_LOG_ERROR, "Data packet without prior valid headers\n");
return -1;
}
if (!s->theora)
skip_bits(&gb, 1);
for (i = 0; i < 3; i++)
s->last_qps[i] = s->qps[i];
s->nqps=0;
do{
s->qps[s->nqps++]= get_bits(&gb, 6);
} while(s->theora >= 0x030200 && s->nqps<3 && get_bits1(&gb));
for (i = s->nqps; i < 3; i++)
s->qps[i] = -1;
if (s->avctx->debug & FF_DEBUG_PICT_INFO)
av_log(s->avctx, AV_LOG_INFO, " VP3 %sframe #%d: Q index = %d\n",
s->keyframe?"key":"", avctx->frame_number+1, s->qps[0]);
s->skip_loop_filter = !s->filter_limit_values[s->qps[0]] ||
avctx->skip_loop_filter >= (s->keyframe ? AVDISCARD_ALL : AVDISCARD_NONKEY);
if (s->qps[0] != s->last_qps[0])
init_loop_filter(s);
for (i = 0; i < s->nqps; i++)
// reinit all dequantizers if the first one changed, because
// the DC of the first quantizer must be used for all matrices
if (s->qps[i] != s->last_qps[i] || s->qps[0] != s->last_qps[0])
init_dequantizer(s, i);
if (avctx->skip_frame >= AVDISCARD_NONKEY && !s->keyframe)
return buf_size;
s->current_frame.reference = 3;
s->current_frame.pict_type = s->keyframe ? AV_PICTURE_TYPE_I : AV_PICTURE_TYPE_P;
s->current_frame.key_frame = s->keyframe;
if (ff_thread_get_buffer(avctx, &s->current_frame) < 0) {
av_log(s->avctx, AV_LOG_ERROR, "get_buffer() failed\n");
goto error;
}
if (!s->edge_emu_buffer)
s->edge_emu_buffer = av_malloc(9*FFABS(s->current_frame.linesize[0]));
if (s->keyframe) {
if (!s->theora)
{
skip_bits(&gb, 4); /* width code */
skip_bits(&gb, 4); /* height code */
if (s->version)
{
s->version = get_bits(&gb, 5);
if (avctx->frame_number == 0)
av_log(s->avctx, AV_LOG_DEBUG, "VP version: %d\n", s->version);
}
}
if (s->version || s->theora)
{
if (get_bits1(&gb))
av_log(s->avctx, AV_LOG_ERROR, "Warning, unsupported keyframe coding type?!\n");
skip_bits(&gb, 2); /* reserved? */
}
} else {
if (!s->golden_frame.data[0]) {
av_log(s->avctx, AV_LOG_WARNING, "vp3: first frame not a keyframe\n");
s->golden_frame.reference = 3;
s->golden_frame.pict_type = AV_PICTURE_TYPE_I;
if (ff_thread_get_buffer(avctx, &s->golden_frame) < 0) {
av_log(s->avctx, AV_LOG_ERROR, "get_buffer() failed\n");
goto error;
}
s->last_frame = s->golden_frame;
s->last_frame.type = FF_BUFFER_TYPE_COPY;
ff_thread_report_progress(&s->last_frame, INT_MAX, 0);
}
}
memset(s->all_fragments, 0, s->fragment_count * sizeof(Vp3Fragment));
ff_thread_finish_setup(avctx);
if (unpack_superblocks(s, &gb)){
av_log(s->avctx, AV_LOG_ERROR, "error in unpack_superblocks\n");
goto error;
}
if (unpack_modes(s, &gb)){
av_log(s->avctx, AV_LOG_ERROR, "error in unpack_modes\n");
goto error;
}
if (unpack_vectors(s, &gb)){
av_log(s->avctx, AV_LOG_ERROR, "error in unpack_vectors\n");
goto error;
}
if (unpack_block_qpis(s, &gb)){
av_log(s->avctx, AV_LOG_ERROR, "error in unpack_block_qpis\n");
goto error;
}
if (unpack_dct_coeffs(s, &gb)){
av_log(s->avctx, AV_LOG_ERROR, "error in unpack_dct_coeffs\n");
goto error;
}
for (i = 0; i < 3; i++) {
int height = s->height >> (i && s->chroma_y_shift);
if (s->flipped_image)
s->data_offset[i] = 0;
else
s->data_offset[i] = (height-1) * s->current_frame.linesize[i];
}
s->last_slice_end = 0;
for (i = 0; i < s->c_superblock_height; i++)
render_slice(s, i);
// filter the last row
for (i = 0; i < 3; i++) {
int row = (s->height >> (3+(i && s->chroma_y_shift))) - 1;
apply_loop_filter(s, i, row, row+1);
}
vp3_draw_horiz_band(s, s->avctx->height);
*got_frame = 1;
*(AVFrame*)data= s->current_frame;
if (!HAVE_THREADS || !(s->avctx->active_thread_type&FF_THREAD_FRAME))
update_frames(avctx);
return buf_size;
error:
ff_thread_report_progress(&s->current_frame, INT_MAX, 0);
if (!HAVE_THREADS || !(s->avctx->active_thread_type&FF_THREAD_FRAME))
avctx->release_buffer(avctx, &s->current_frame);
return -1;
}
| 5,084 |
qemu | ec05ec26f940564b1e07bf88857035ec27e21dd8 | 0 | void memory_region_reset_dirty(MemoryRegion *mr, hwaddr addr,
hwaddr size, unsigned client)
{
assert(mr->terminates);
cpu_physical_memory_test_and_clear_dirty(mr->ram_addr + addr, size,
client);
}
| 5,086 |
qemu | 43771539d4666cba16298fc6b0ea63867425277c | 0 | void qemu_ram_free_from_ptr(ram_addr_t addr)
{
RAMBlock *block;
/* This assumes the iothread lock is taken here too. */
qemu_mutex_lock_ramlist();
QTAILQ_FOREACH(block, &ram_list.blocks, next) {
if (addr == block->offset) {
QTAILQ_REMOVE(&ram_list.blocks, block, next);
ram_list.mru_block = NULL;
ram_list.version++;
g_free(block);
break;
}
}
qemu_mutex_unlock_ramlist();
}
| 5,088 |
qemu | 88266f5aa70fa71fd5cc20aa4dbeb7a7bd8d2e92 | 0 | void bdrv_io_limits_enable(BlockDriverState *bs)
{
qemu_co_queue_init(&bs->throttled_reqs);
bs->block_timer = qemu_new_timer_ns(vm_clock, bdrv_block_timer, bs);
bs->io_limits_enabled = true;
}
| 5,089 |
qemu | a8170e5e97ad17ca169c64ba87ae2f53850dab4c | 0 | static void cadence_ttc_write(void *opaque, target_phys_addr_t offset,
uint64_t value, unsigned size)
{
CadenceTimerState *s = cadence_timer_from_addr(opaque, offset);
DB_PRINT("addr: %08x data %08x\n", offset, (unsigned)value);
cadence_timer_sync(s);
switch (offset) {
case 0x00: /* clock control */
case 0x04:
case 0x08:
s->reg_clock = value & 0x3F;
break;
case 0x0c: /* counter control */
case 0x10:
case 0x14:
if (value & COUNTER_CTRL_RST) {
s->reg_value = 0;
}
s->reg_count = value & 0x3f & ~COUNTER_CTRL_RST;
break;
case 0x24: /* interval register */
case 0x28:
case 0x2c:
s->reg_interval = value & 0xffff;
break;
case 0x30: /* match register */
case 0x34:
case 0x38:
s->reg_match[0] = value & 0xffff;
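/* Note: there is no break between the match-register groups here, so a
* write to match register 0 appears to fall through and also update
* match registers 1 and 2; this may be unintended. */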
case 0x3c: /* match register */
case 0x40:
case 0x44:
s->reg_match[1] = value & 0xffff;
case 0x48: /* match register */
case 0x4c:
case 0x50:
s->reg_match[2] = value & 0xffff;
break;
case 0x54: /* interrupt register */
case 0x58:
case 0x5c:
break;
case 0x60: /* interrupt enable */
case 0x64:
case 0x68:
s->reg_intr_en = value & 0x3f;
break;
case 0x6c: /* event control */
case 0x70:
case 0x74:
s->reg_event_ctrl = value & 0x07;
break;
default:
return;
}
cadence_timer_run(s);
cadence_timer_update(s);
}
| 5,090 |
qemu | ef546f1275f6563e8934dd5e338d29d9f9909ca6 | 0 | static void virtio_scsi_hotplug(HotplugHandler *hotplug_dev, DeviceState *dev,
Error **errp)
{
VirtIODevice *vdev = VIRTIO_DEVICE(hotplug_dev);
VirtIOSCSI *s = VIRTIO_SCSI(vdev);
SCSIDevice *sd = SCSI_DEVICE(dev);
if (s->ctx && !s->dataplane_disabled) {
if (blk_op_is_blocked(sd->conf.blk, BLOCK_OP_TYPE_DATAPLANE, errp)) {
return;
}
blk_op_block_all(sd->conf.blk, s->blocker);
}
if ((vdev->guest_features >> VIRTIO_SCSI_F_HOTPLUG) & 1) {
virtio_scsi_push_event(s, sd,
VIRTIO_SCSI_T_TRANSPORT_RESET,
VIRTIO_SCSI_EVT_RESET_RESCAN);
}
}
| 5,092 |
qemu | a8170e5e97ad17ca169c64ba87ae2f53850dab4c | 0 | static int write_elf64_load(DumpState *s, MemoryMapping *memory_mapping,
int phdr_index, target_phys_addr_t offset)
{
Elf64_Phdr phdr;
int ret;
int endian = s->dump_info.d_endian;
memset(&phdr, 0, sizeof(Elf64_Phdr));
phdr.p_type = cpu_convert_to_target32(PT_LOAD, endian);
phdr.p_offset = cpu_convert_to_target64(offset, endian);
phdr.p_paddr = cpu_convert_to_target64(memory_mapping->phys_addr, endian);
if (offset == -1) {
/* When the memory is not stored into vmcore, offset will be -1 */
phdr.p_filesz = 0;
} else {
phdr.p_filesz = cpu_convert_to_target64(memory_mapping->length, endian);
}
phdr.p_memsz = cpu_convert_to_target64(memory_mapping->length, endian);
phdr.p_vaddr = cpu_convert_to_target64(memory_mapping->virt_addr, endian);
ret = fd_write_vmcore(&phdr, sizeof(Elf64_Phdr), s);
if (ret < 0) {
dump_error(s, "dump: failed to write program header table.\n");
return -1;
}
return 0;
}
| 5,094 |
FFmpeg | eed752d61da332fb13e9893a175a90fed7b1d7d3 | 0 | static void SET_TYPE(resample_one)(ResampleContext *c,
void *dst0, int dst_index, const void *src0,
int src_size, int index, int frac)
{
FELEM *dst = dst0;
const FELEM *src = src0;
int i;
int sample_index = index >> c->phase_shift;
FELEM2 val = 0;
FELEM *filter = ((FELEM *)c->filter_bank) +
c->filter_length * (index & c->phase_mask);
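/* Three cases follow: near the start of the buffer, negative source
* indices are mirrored and wrapped (FFABS % src_size); with linear
* interpolation enabled, two adjacent filter phases are evaluated and
* blended by frac / src_incr; otherwise a single FIR pass over
* filter_length taps is used. */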
if (sample_index < 0) {
for (i = 0; i < c->filter_length; i++)
val += src[FFABS(sample_index + i) % src_size] *
(FELEM2)filter[i];
} else if (c->linear) {
FELEM2 v2 = 0;
for (i = 0; i < c->filter_length; i++) {
val += src[abs(sample_index + i)] * (FELEM2)filter[i];
v2 += src[abs(sample_index + i)] * (FELEM2)filter[i + c->filter_length];
}
val += (v2 - val) * (FELEML)frac / c->src_incr;
} else {
for (i = 0; i < c->filter_length; i++)
val += src[sample_index + i] * (FELEM2)filter[i];
}
OUT(dst[dst_index], val);
}
| 5,095 |
qemu | ddca7f86ac022289840e0200fd4050b2b58e9176 | 0 | static void v9fs_xattrcreate(void *opaque)
{
int flags;
int32_t fid;
int64_t size;
ssize_t err = 0;
V9fsString name;
size_t offset = 7;
V9fsFidState *file_fidp;
V9fsFidState *xattr_fidp;
V9fsPDU *pdu = opaque;
V9fsState *s = pdu->s;
pdu_unmarshal(pdu, offset, "dsqd",
&fid, &name, &size, &flags);
trace_v9fs_xattrcreate(pdu->tag, pdu->id, fid, name.data, size, flags);
file_fidp = get_fid(pdu, fid);
if (file_fidp == NULL) {
err = -EINVAL;
goto out_nofid;
}
/* Make the file fid point to xattr */
xattr_fidp = file_fidp;
xattr_fidp->fid_type = P9_FID_XATTR;
xattr_fidp->fs.xattr.copied_len = 0;
xattr_fidp->fs.xattr.len = size;
xattr_fidp->fs.xattr.flags = flags;
v9fs_string_init(&xattr_fidp->fs.xattr.name);
v9fs_string_copy(&xattr_fidp->fs.xattr.name, &name);
if (size) {
xattr_fidp->fs.xattr.value = g_malloc(size);
} else {
xattr_fidp->fs.xattr.value = NULL;
}
err = offset;
put_fid(pdu, file_fidp);
out_nofid:
complete_pdu(s, pdu, err);
v9fs_string_free(&name);
}
| 5,096 |
qemu | 86f6ae67e157362f3b141649874213ce01dcc622 | 0 | uint64_t bdrv_dirty_bitmap_serialization_size(const BdrvDirtyBitmap *bitmap,
uint64_t start, uint64_t count)
{
return hbitmap_serialization_size(bitmap->bitmap, start, count);
}
| 5,097 |
qemu | 498f21405a286f718a0767c791b7d2db19f4e5bd | 0 | static coroutine_fn int sd_co_discard(BlockDriverState *bs, int64_t sector_num,
int nb_sectors)
{
SheepdogAIOCB *acb;
QEMUIOVector dummy;
BDRVSheepdogState *s = bs->opaque;
int ret;
if (!s->discard_supported) {
return 0;
}
acb = sd_aio_setup(bs, &dummy, sector_num, nb_sectors);
acb->aiocb_type = AIOCB_DISCARD_OBJ;
acb->aio_done_func = sd_finish_aiocb;
retry:
if (check_overwrapping_aiocb(s, acb)) {
qemu_co_queue_wait(&s->overwrapping_queue);
goto retry;
}
ret = sd_co_rw_vector(acb);
if (ret <= 0) {
QLIST_REMOVE(acb, aiocb_siblings);
qemu_co_queue_restart_all(&s->overwrapping_queue);
qemu_aio_unref(acb);
return ret;
}
qemu_coroutine_yield();
QLIST_REMOVE(acb, aiocb_siblings);
qemu_co_queue_restart_all(&s->overwrapping_queue);
return acb->ret;
}
| 5,100 |
qemu | 1f01e50b8330c24714ddca5841fdbb703076b121 | 0 | static void coroutine_fn qed_need_check_timer_entry(void *opaque)
{
BDRVQEDState *s = opaque;
int ret;
/* The timer should only fire when allocating writes have drained */
assert(!s->allocating_acb);
trace_qed_need_check_timer_cb(s);
qed_acquire(s);
qed_plug_allocating_write_reqs(s);
/* Ensure writes are on disk before clearing flag */
ret = bdrv_co_flush(s->bs->file->bs);
qed_release(s);
if (ret < 0) {
qed_unplug_allocating_write_reqs(s);
return;
}
s->header.features &= ~QED_F_NEED_CHECK;
ret = qed_write_header(s);
(void) ret;
qed_unplug_allocating_write_reqs(s);
ret = bdrv_co_flush(s->bs);
(void) ret;
}
| 5,103 |
qemu | a3251186fc6a04d421e9c4b65aa04ec32379ec38 | 0 | static void gen_op_update_neg_cc(void)
{
tcg_gen_neg_tl(cpu_cc_src, cpu_T[0]);
tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
}
| 5,105 |
FFmpeg | 87e8788680e16c51f6048af26f3f7830c35207a5 | 0 | static int thp_probe(AVProbeData *p)
{
/* check file header */
if (p->buf_size < 4)
return 0;
if (AV_RL32(p->buf) == MKTAG('T', 'H', 'P', '\0'))
return AVPROBE_SCORE_MAX;
else
return 0;
}
| 5,106 |
qemu | 85c97ca7a10b93216bc95052e9dabe3a4bb8736a | 0 | static int coroutine_fn bdrv_aligned_preadv(BlockDriverState *bs,
BdrvTrackedRequest *req, int64_t offset, unsigned int bytes,
int64_t align, QEMUIOVector *qiov, int flags)
{
int64_t total_bytes, max_bytes;
int ret = 0;
uint64_t bytes_remaining = bytes;
int max_transfer;
assert(is_power_of_2(align));
assert((offset & (align - 1)) == 0);
assert((bytes & (align - 1)) == 0);
assert(!qiov || bytes == qiov->size);
assert((bs->open_flags & BDRV_O_NO_IO) == 0);
max_transfer = QEMU_ALIGN_DOWN(MIN_NON_ZERO(bs->bl.max_transfer, INT_MAX),
align);
/* TODO: We would need a per-BDS .supported_read_flags and
* potential fallback support, if we ever implement any read flags
* to pass through to drivers. For now, there aren't any
* passthrough flags. */
assert(!(flags & ~(BDRV_REQ_NO_SERIALISING | BDRV_REQ_COPY_ON_READ)));
/* Handle Copy on Read and associated serialisation */
if (flags & BDRV_REQ_COPY_ON_READ) {
/* If we touch the same cluster it counts as an overlap. This
* guarantees that allocating writes will be serialized and not race
* with each other for the same cluster. For example, in copy-on-read
* it ensures that the CoR read and write operations are atomic and
* guest writes cannot interleave between them. */
mark_request_serialising(req, bdrv_get_cluster_size(bs));
}
if (!(flags & BDRV_REQ_NO_SERIALISING)) {
wait_serialising_requests(req);
}
if (flags & BDRV_REQ_COPY_ON_READ) {
int64_t start_sector = offset >> BDRV_SECTOR_BITS;
int64_t end_sector = DIV_ROUND_UP(offset + bytes, BDRV_SECTOR_SIZE);
unsigned int nb_sectors = end_sector - start_sector;
int pnum;
ret = bdrv_is_allocated(bs, start_sector, nb_sectors, &pnum);
if (ret < 0) {
goto out;
}
if (!ret || pnum != nb_sectors) {
ret = bdrv_co_do_copy_on_readv(bs, offset, bytes, qiov);
goto out;
}
}
/* Forward the request to the BlockDriver, possibly fragmenting it */
total_bytes = bdrv_getlength(bs);
if (total_bytes < 0) {
ret = total_bytes;
goto out;
}
max_bytes = ROUND_UP(MAX(0, total_bytes - offset), align);
if (bytes <= max_bytes && bytes <= max_transfer) {
ret = bdrv_driver_preadv(bs, offset, bytes, qiov, 0);
goto out;
}
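/* Larger requests are fragmented below: while data remains within the
* device (max_bytes > 0) it is read in chunks of at most max_transfer
* bytes, and any tail beyond the end of the file is zero-filled in the
* destination qiov. */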
while (bytes_remaining) {
int num;
if (max_bytes) {
QEMUIOVector local_qiov;
num = MIN(bytes_remaining, MIN(max_bytes, max_transfer));
assert(num);
qemu_iovec_init(&local_qiov, qiov->niov);
qemu_iovec_concat(&local_qiov, qiov, bytes - bytes_remaining, num);
ret = bdrv_driver_preadv(bs, offset + bytes - bytes_remaining,
num, &local_qiov, 0);
max_bytes -= num;
qemu_iovec_destroy(&local_qiov);
} else {
num = bytes_remaining;
ret = qemu_iovec_memset(qiov, bytes - bytes_remaining, 0,
bytes_remaining);
}
if (ret < 0) {
goto out;
}
bytes_remaining -= num;
}
out:
return ret < 0 ? ret : 0;
}
| 5,107 |
qemu | 3b098d56979d2f7fd707c5be85555d114353a28d | 0 | Visitor *qmp_output_get_visitor(QmpOutputVisitor *v)
{
return &v->visitor;
}
| 5,108 |
qemu | a153bf52b37e148f052b0869600877130671a03d | 0 | bool aio_dispatch(AioContext *ctx, bool dispatch_fds)
{
bool progress;
progress = aio_bh_poll(ctx);
if (dispatch_fds) {
progress |= aio_dispatch_handlers(ctx, INVALID_HANDLE_VALUE);
}
progress |= timerlistgroup_run_timers(&ctx->tlg);
return progress;
}
| 5,109 |
qemu | 81cf8d8adc64203567e03326c13ea4abec9fe5df | 0 | void helper_check_iob(CPUX86State *env, uint32_t t0)
{
check_io(env, t0, 1);
}
| 5,110 |
qemu | eb5d4f5329df83ea15244b47f7fbca21adaae41b | 0 | static void slirp_state_save(QEMUFile *f, void *opaque)
{
Slirp *slirp = opaque;
struct ex_list *ex_ptr;
for (ex_ptr = slirp->exec_list; ex_ptr; ex_ptr = ex_ptr->ex_next)
if (ex_ptr->ex_pty == 3) {
struct socket *so;
so = slirp_find_ctl_socket(slirp, ex_ptr->ex_addr,
ntohs(ex_ptr->ex_fport));
if (!so)
continue;
qemu_put_byte(f, 42);
vmstate_save_state(f, &vmstate_slirp_socket, so, NULL);
}
qemu_put_byte(f, 0);
qemu_put_be16(f, slirp->ip_id);
slirp_bootp_save(f, slirp);
}
| 5,111 |
qemu | 7859cc6e39bf86f890bb1c72fd9ba41deb6ce2e7 | 0 | static inline void tlb_update_dirty(CPUTLBEntry *tlb_entry)
{
ram_addr_t ram_addr;
void *p;
if ((tlb_entry->addr_write & ~TARGET_PAGE_MASK) == io_mem_ram.ram_addr) {
p = (void *)(unsigned long)((tlb_entry->addr_write & TARGET_PAGE_MASK)
+ tlb_entry->addend);
ram_addr = qemu_ram_addr_from_host_nofail(p);
if (!cpu_physical_memory_is_dirty(ram_addr)) {
tlb_entry->addr_write |= TLB_NOTDIRTY;
}
}
}
| 5,112 |
qemu | bf55b7afce53718ef96f4e6616da62c0ccac37dd | 0 | static IOMMUTLBEntry spapr_tce_translate_iommu(MemoryRegion *iommu, hwaddr addr,
bool is_write)
{
sPAPRTCETable *tcet = container_of(iommu, sPAPRTCETable, iommu);
uint64_t tce;
IOMMUTLBEntry ret = {
.target_as = &address_space_memory,
.iova = 0,
.translated_addr = 0,
.addr_mask = ~(hwaddr)0,
.perm = IOMMU_NONE,
};
if ((addr >> tcet->page_shift) < tcet->nb_table) {
/* Check if we are in bounds */
hwaddr page_mask = IOMMU_PAGE_MASK(tcet->page_shift);
tce = tcet->table[addr >> tcet->page_shift];
ret.iova = addr & page_mask;
ret.translated_addr = tce & page_mask;
ret.addr_mask = ~page_mask;
ret.perm = spapr_tce_iommu_access_flags(tce);
}
trace_spapr_iommu_xlate(tcet->liobn, addr, ret.iova, ret.perm,
ret.addr_mask);
return ret;
}
| 5,113 |
qemu | bf55b7afce53718ef96f4e6616da62c0ccac37dd | 0 | int rpcit_service_call(S390CPU *cpu, uint8_t r1, uint8_t r2)
{
CPUS390XState *env = &cpu->env;
uint32_t fh;
S390PCIBusDevice *pbdev;
S390PCIIOMMU *iommu;
hwaddr start, end;
IOMMUTLBEntry entry;
MemoryRegion *mr;
cpu_synchronize_state(CPU(cpu));
if (env->psw.mask & PSW_MASK_PSTATE) {
program_interrupt(env, PGM_PRIVILEGED, 4);
goto out;
}
if (r2 & 0x1) {
program_interrupt(env, PGM_SPECIFICATION, 4);
goto out;
}
fh = env->regs[r1] >> 32;
start = env->regs[r2];
end = start + env->regs[r2 + 1];
pbdev = s390_pci_find_dev_by_fh(s390_get_phb(), fh);
if (!pbdev) {
DPRINTF("rpcit no pci dev\n");
setcc(cpu, ZPCI_PCI_LS_INVAL_HANDLE);
goto out;
}
switch (pbdev->state) {
case ZPCI_FS_RESERVED:
case ZPCI_FS_STANDBY:
case ZPCI_FS_DISABLED:
case ZPCI_FS_PERMANENT_ERROR:
setcc(cpu, ZPCI_PCI_LS_INVAL_HANDLE);
return 0;
case ZPCI_FS_ERROR:
setcc(cpu, ZPCI_PCI_LS_ERR);
s390_set_status_code(env, r1, ZPCI_MOD_ST_ERROR_RECOVER);
return 0;
default:
break;
}
iommu = pbdev->iommu;
if (!iommu->g_iota) {
pbdev->state = ZPCI_FS_ERROR;
setcc(cpu, ZPCI_PCI_LS_ERR);
s390_set_status_code(env, r1, ZPCI_PCI_ST_INSUF_RES);
s390_pci_generate_error_event(ERR_EVENT_INVALAS, pbdev->fh, pbdev->fid,
start, 0);
goto out;
}
if (end < iommu->pba || start > iommu->pal) {
pbdev->state = ZPCI_FS_ERROR;
setcc(cpu, ZPCI_PCI_LS_ERR);
s390_set_status_code(env, r1, ZPCI_PCI_ST_INSUF_RES);
s390_pci_generate_error_event(ERR_EVENT_OORANGE, pbdev->fh, pbdev->fid,
start, 0);
goto out;
}
mr = &iommu->iommu_mr;
while (start < end) {
entry = mr->iommu_ops->translate(mr, start, 0);
if (!entry.translated_addr) {
pbdev->state = ZPCI_FS_ERROR;
setcc(cpu, ZPCI_PCI_LS_ERR);
s390_set_status_code(env, r1, ZPCI_PCI_ST_INSUF_RES);
s390_pci_generate_error_event(ERR_EVENT_SERR, pbdev->fh, pbdev->fid,
start, ERR_EVENT_Q_BIT);
goto out;
}
memory_region_notify_iommu(mr, entry);
start += entry.addr_mask + 1;
}
setcc(cpu, ZPCI_PCI_LS_OK);
out:
return 0;
}
| 5,114 |
qemu | 381b487d54ba18c73df9db8452028a330058c505 | 0 | static int coroutine_fn qcow2_co_is_allocated(BlockDriverState *bs,
int64_t sector_num, int nb_sectors, int *pnum)
{
BDRVQcowState *s = bs->opaque;
uint64_t cluster_offset;
int ret;
*pnum = nb_sectors;
/* FIXME We can get errors here, but the bdrv_co_is_allocated interface
* can't pass them on today */
qemu_co_mutex_lock(&s->lock);
ret = qcow2_get_cluster_offset(bs, sector_num << 9, pnum, &cluster_offset);
qemu_co_mutex_unlock(&s->lock);
if (ret < 0) {
*pnum = 0;
}
return (cluster_offset != 0);
}
| 5,115 |
qemu | 630530a6529bc3da9ab8aead7053dc753cb9ac77 | 1 | static int vmdk_write(BlockDriverState *bs, int64_t sector_num,
const uint8_t *buf, int nb_sectors)
{
BDRVVmdkState *s = bs->opaque;
int index_in_cluster, n;
uint64_t cluster_offset;
static int cid_update = 0;
while (nb_sectors > 0) {
index_in_cluster = sector_num & (s->cluster_sectors - 1);
n = s->cluster_sectors - index_in_cluster;
if (n > nb_sectors)
n = nb_sectors;
cluster_offset = get_cluster_offset(bs, sector_num << 9, 1);
if (!cluster_offset)
return -1;
if (bdrv_pwrite(s->hd, cluster_offset + index_in_cluster * 512, buf, n * 512) != n * 512)
return -1;
nb_sectors -= n;
sector_num += n;
buf += n * 512;
// update CID on the first write every time the virtual disk is opened
if (!cid_update) {
vmdk_write_cid(bs, time(NULL));
cid_update++;
}
}
return 0;
}
| 5,117 |
FFmpeg | 478f1c3d5e5463a284ea7efecfc62d47ba3be11a | 1 | static int update_thread_context(AVCodecContext *dst, const AVCodecContext *src)
{
PNGDecContext *psrc = src->priv_data;
PNGDecContext *pdst = dst->priv_data;
int ret;
if (dst == src)
return 0;
ff_thread_release_buffer(dst, &pdst->picture);
if (psrc->picture.f->data[0] &&
(ret = ff_thread_ref_frame(&pdst->picture, &psrc->picture)) < 0)
return ret;
if (CONFIG_APNG_DECODER && dst->codec_id == AV_CODEC_ID_APNG) {
pdst->width = psrc->width;
pdst->height = psrc->height;
pdst->bit_depth = psrc->bit_depth;
pdst->color_type = psrc->color_type;
pdst->compression_type = psrc->compression_type;
pdst->interlace_type = psrc->interlace_type;
pdst->filter_type = psrc->filter_type;
pdst->cur_w = psrc->cur_w;
pdst->cur_h = psrc->cur_h;
pdst->x_offset = psrc->x_offset;
pdst->y_offset = psrc->y_offset;
pdst->has_trns = psrc->has_trns;
memcpy(pdst->transparent_color_be, psrc->transparent_color_be, sizeof(pdst->transparent_color_be));
pdst->dispose_op = psrc->dispose_op;
memcpy(pdst->palette, psrc->palette, sizeof(pdst->palette));
pdst->state |= psrc->state & (PNG_IHDR | PNG_PLTE);
ff_thread_release_buffer(dst, &pdst->last_picture);
if (psrc->last_picture.f->data[0] &&
(ret = ff_thread_ref_frame(&pdst->last_picture, &psrc->last_picture)) < 0)
return ret;
ff_thread_release_buffer(dst, &pdst->previous_picture);
if (psrc->previous_picture.f->data[0] &&
(ret = ff_thread_ref_frame(&pdst->previous_picture, &psrc->previous_picture)) < 0)
return ret;
}
return 0;
}
| 5,118 |
FFmpeg | 64476d7ee86e01f43312dc5dff850d641d2b6c9a | 1 | static int read_seek(AVFormatContext *s, int stream_index, int64_t timestamp, int flags)
{
AVStream *st = s->streams[stream_index];
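/* Raw frames are assumed to be width * height * 4 bytes each, so the
* timestamp maps directly to a byte offset. */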
avio_seek(s->pb, FFMAX(timestamp, 0) * st->codec->width * st->codec->height * 4, SEEK_SET);
return 0;
}
| 5,119 |
FFmpeg | da2f1568439ff1786413899dbb4fe30d87ab2721 | 0 | static int transcode_init(void)
{
int ret = 0, i, j, k;
AVFormatContext *oc;
OutputStream *ost;
InputStream *ist;
char error[1024] = {0};
int want_sdp = 1;
for (i = 0; i < nb_filtergraphs; i++) {
FilterGraph *fg = filtergraphs[i];
for (j = 0; j < fg->nb_outputs; j++) {
OutputFilter *ofilter = fg->outputs[j];
if (!ofilter->ost || ofilter->ost->source_index >= 0)
continue;
if (fg->nb_inputs != 1)
continue;
for (k = nb_input_streams-1; k >= 0 ; k--)
if (fg->inputs[0]->ist == input_streams[k])
break;
ofilter->ost->source_index = k;
}
}
/* init framerate emulation */
for (i = 0; i < nb_input_files; i++) {
InputFile *ifile = input_files[i];
if (ifile->rate_emu)
for (j = 0; j < ifile->nb_streams; j++)
input_streams[j + ifile->ist_index]->start = av_gettime_relative();
}
/* output stream init */
for (i = 0; i < nb_output_files; i++) {
oc = output_files[i]->ctx;
if (!oc->nb_streams && !(oc->oformat->flags & AVFMT_NOSTREAMS)) {
av_dump_format(oc, i, oc->filename, 1);
av_log(NULL, AV_LOG_ERROR, "Output file #%d does not contain any stream\n", i);
return AVERROR(EINVAL);
}
}
/* init complex filtergraphs */
for (i = 0; i < nb_filtergraphs; i++)
if ((ret = avfilter_graph_config(filtergraphs[i]->graph, NULL)) < 0)
return ret;
/* for each output stream, we compute the right encoding parameters */
for (i = 0; i < nb_output_streams; i++) {
AVCodecContext *enc_ctx;
AVCodecContext *dec_ctx = NULL;
ost = output_streams[i];
oc = output_files[ost->file_index]->ctx;
ist = get_input_stream(ost);
if (ost->attachment_filename)
continue;
enc_ctx = ost->stream_copy ? ost->st->codec : ost->enc_ctx;
if (ist) {
dec_ctx = ist->dec_ctx;
ost->st->disposition = ist->st->disposition;
enc_ctx->bits_per_raw_sample = dec_ctx->bits_per_raw_sample;
enc_ctx->chroma_sample_location = dec_ctx->chroma_sample_location;
} else {
for (j=0; j<oc->nb_streams; j++) {
AVStream *st = oc->streams[j];
if (st != ost->st && st->codec->codec_type == enc_ctx->codec_type)
break;
}
if (j == oc->nb_streams)
if (enc_ctx->codec_type == AVMEDIA_TYPE_AUDIO || enc_ctx->codec_type == AVMEDIA_TYPE_VIDEO)
ost->st->disposition = AV_DISPOSITION_DEFAULT;
}
if (ost->stream_copy) {
AVRational sar;
uint64_t extra_size;
av_assert0(ist && !ost->filter);
extra_size = (uint64_t)dec_ctx->extradata_size + FF_INPUT_BUFFER_PADDING_SIZE;
if (extra_size > INT_MAX) {
return AVERROR(EINVAL);
}
/* if stream_copy is selected, no need to decode or encode */
enc_ctx->codec_id = dec_ctx->codec_id;
enc_ctx->codec_type = dec_ctx->codec_type;
if (!enc_ctx->codec_tag) {
unsigned int codec_tag;
if (!oc->oformat->codec_tag ||
av_codec_get_id (oc->oformat->codec_tag, dec_ctx->codec_tag) == enc_ctx->codec_id ||
!av_codec_get_tag2(oc->oformat->codec_tag, dec_ctx->codec_id, &codec_tag))
enc_ctx->codec_tag = dec_ctx->codec_tag;
}
enc_ctx->bit_rate = dec_ctx->bit_rate;
enc_ctx->rc_max_rate = dec_ctx->rc_max_rate;
enc_ctx->rc_buffer_size = dec_ctx->rc_buffer_size;
enc_ctx->field_order = dec_ctx->field_order;
if (dec_ctx->extradata_size) {
enc_ctx->extradata = av_mallocz(extra_size);
if (!enc_ctx->extradata) {
return AVERROR(ENOMEM);
}
memcpy(enc_ctx->extradata, dec_ctx->extradata, dec_ctx->extradata_size);
}
enc_ctx->extradata_size= dec_ctx->extradata_size;
enc_ctx->bits_per_coded_sample = dec_ctx->bits_per_coded_sample;
enc_ctx->time_base = ist->st->time_base;
/*
* AVI is a special case here because it supports variable fps, but
* having the fps and the timebase differ significantly adds quite some
* overhead
*/
if(!strcmp(oc->oformat->name, "avi")) {
if ( copy_tb<0 && av_q2d(ist->st->r_frame_rate) >= av_q2d(ist->st->avg_frame_rate)
&& 0.5/av_q2d(ist->st->r_frame_rate) > av_q2d(ist->st->time_base)
&& 0.5/av_q2d(ist->st->r_frame_rate) > av_q2d(dec_ctx->time_base)
&& av_q2d(ist->st->time_base) < 1.0/500 && av_q2d(dec_ctx->time_base) < 1.0/500
|| copy_tb==2){
enc_ctx->time_base.num = ist->st->r_frame_rate.den;
enc_ctx->time_base.den = 2*ist->st->r_frame_rate.num;
enc_ctx->ticks_per_frame = 2;
} else if ( copy_tb<0 && av_q2d(dec_ctx->time_base)*dec_ctx->ticks_per_frame > 2*av_q2d(ist->st->time_base)
&& av_q2d(ist->st->time_base) < 1.0/500
|| copy_tb==0){
enc_ctx->time_base = dec_ctx->time_base;
enc_ctx->time_base.num *= dec_ctx->ticks_per_frame;
enc_ctx->time_base.den *= 2;
enc_ctx->ticks_per_frame = 2;
}
} else if(!(oc->oformat->flags & AVFMT_VARIABLE_FPS)
&& strcmp(oc->oformat->name, "mov") && strcmp(oc->oformat->name, "mp4") && strcmp(oc->oformat->name, "3gp")
&& strcmp(oc->oformat->name, "3g2") && strcmp(oc->oformat->name, "psp") && strcmp(oc->oformat->name, "ipod")
&& strcmp(oc->oformat->name, "f4v")
) {
if( copy_tb<0 && dec_ctx->time_base.den
&& av_q2d(dec_ctx->time_base)*dec_ctx->ticks_per_frame > av_q2d(ist->st->time_base)
&& av_q2d(ist->st->time_base) < 1.0/500
|| copy_tb==0){
enc_ctx->time_base = dec_ctx->time_base;
enc_ctx->time_base.num *= dec_ctx->ticks_per_frame;
}
}
if ( enc_ctx->codec_tag == AV_RL32("tmcd")
&& dec_ctx->time_base.num < dec_ctx->time_base.den
&& dec_ctx->time_base.num > 0
&& 121LL*dec_ctx->time_base.num > dec_ctx->time_base.den) {
enc_ctx->time_base = dec_ctx->time_base;
}
if (ist && !ost->frame_rate.num)
ost->frame_rate = ist->framerate;
if(ost->frame_rate.num)
enc_ctx->time_base = av_inv_q(ost->frame_rate);
av_reduce(&enc_ctx->time_base.num, &enc_ctx->time_base.den,
enc_ctx->time_base.num, enc_ctx->time_base.den, INT_MAX);
if (ist->st->nb_side_data) {
ost->st->side_data = av_realloc_array(NULL, ist->st->nb_side_data,
sizeof(*ist->st->side_data));
if (!ost->st->side_data)
return AVERROR(ENOMEM);
for (j = 0; j < ist->st->nb_side_data; j++) {
const AVPacketSideData *sd_src = &ist->st->side_data[j];
AVPacketSideData *sd_dst = &ost->st->side_data[j];
sd_dst->data = av_malloc(sd_src->size);
if (!sd_dst->data)
return AVERROR(ENOMEM);
memcpy(sd_dst->data, sd_src->data, sd_src->size);
sd_dst->size = sd_src->size;
sd_dst->type = sd_src->type;
ost->st->nb_side_data++;
}
}
ost->parser = av_parser_init(enc_ctx->codec_id);
switch (enc_ctx->codec_type) {
case AVMEDIA_TYPE_AUDIO:
if (audio_volume != 256) {
av_log(NULL, AV_LOG_FATAL, "-acodec copy and -vol are incompatible (frames are not decoded)\n");
exit_program(1);
}
enc_ctx->channel_layout = dec_ctx->channel_layout;
enc_ctx->sample_rate = dec_ctx->sample_rate;
enc_ctx->channels = dec_ctx->channels;
enc_ctx->frame_size = dec_ctx->frame_size;
enc_ctx->audio_service_type = dec_ctx->audio_service_type;
enc_ctx->block_align = dec_ctx->block_align;
enc_ctx->initial_padding = dec_ctx->delay;
#if FF_API_AUDIOENC_DELAY
enc_ctx->delay = dec_ctx->delay;
#endif
if((enc_ctx->block_align == 1 || enc_ctx->block_align == 1152 || enc_ctx->block_align == 576) && enc_ctx->codec_id == AV_CODEC_ID_MP3)
enc_ctx->block_align= 0;
if(enc_ctx->codec_id == AV_CODEC_ID_AC3)
enc_ctx->block_align= 0;
break;
case AVMEDIA_TYPE_VIDEO:
enc_ctx->pix_fmt = dec_ctx->pix_fmt;
enc_ctx->width = dec_ctx->width;
enc_ctx->height = dec_ctx->height;
enc_ctx->has_b_frames = dec_ctx->has_b_frames;
if (ost->frame_aspect_ratio.num) { // overridden by the -aspect cli option
sar =
av_mul_q(ost->frame_aspect_ratio,
(AVRational){ enc_ctx->height, enc_ctx->width });
av_log(NULL, AV_LOG_WARNING, "Overriding aspect ratio "
"with stream copy may produce invalid files\n");
}
else if (ist->st->sample_aspect_ratio.num)
sar = ist->st->sample_aspect_ratio;
else
sar = dec_ctx->sample_aspect_ratio;
ost->st->sample_aspect_ratio = enc_ctx->sample_aspect_ratio = sar;
ost->st->avg_frame_rate = ist->st->avg_frame_rate;
ost->st->r_frame_rate = ist->st->r_frame_rate;
break;
case AVMEDIA_TYPE_SUBTITLE:
enc_ctx->width = dec_ctx->width;
enc_ctx->height = dec_ctx->height;
break;
case AVMEDIA_TYPE_DATA:
case AVMEDIA_TYPE_ATTACHMENT:
break;
default:
abort();
}
} else {
if (!ost->enc)
ost->enc = avcodec_find_encoder(enc_ctx->codec_id);
if (!ost->enc) {
/* should only happen when a default codec is not present. */
snprintf(error, sizeof(error), "Encoder (codec %s) not found for output stream #%d:%d",
avcodec_get_name(ost->st->codec->codec_id), ost->file_index, ost->index);
ret = AVERROR(EINVAL);
goto dump_format;
}
if (ist)
ist->decoding_needed |= DECODING_FOR_OST;
ost->encoding_needed = 1;
set_encoder_id(output_files[ost->file_index], ost);
if (!ost->filter &&
(enc_ctx->codec_type == AVMEDIA_TYPE_VIDEO ||
enc_ctx->codec_type == AVMEDIA_TYPE_AUDIO)) {
FilterGraph *fg;
fg = init_simple_filtergraph(ist, ost);
if (configure_filtergraph(fg)) {
av_log(NULL, AV_LOG_FATAL, "Error opening filters!\n");
exit_program(1);
}
}
if (enc_ctx->codec_type == AVMEDIA_TYPE_VIDEO) {
if (ost->filter && !ost->frame_rate.num)
ost->frame_rate = av_buffersink_get_frame_rate(ost->filter->filter);
if (ist && !ost->frame_rate.num)
ost->frame_rate = ist->framerate;
if (ist && !ost->frame_rate.num)
ost->frame_rate = ist->st->r_frame_rate;
if (ist && !ost->frame_rate.num) {
ost->frame_rate = (AVRational){25, 1};
av_log(NULL, AV_LOG_WARNING,
"No information "
"about the input framerate is available. Falling "
"back to a default value of 25fps for output stream #%d:%d. Use the -r option "
"if you want a different framerate.\n",
ost->file_index, ost->index);
}
// ost->frame_rate = ist->st->avg_frame_rate.num ? ist->st->avg_frame_rate : (AVRational){25, 1};
if (ost->enc && ost->enc->supported_framerates && !ost->force_fps) {
int idx = av_find_nearest_q_idx(ost->frame_rate, ost->enc->supported_framerates);
ost->frame_rate = ost->enc->supported_framerates[idx];
}
// reduce frame rate for mpeg4 to be within the spec limits
if (enc_ctx->codec_id == AV_CODEC_ID_MPEG4) {
av_reduce(&ost->frame_rate.num, &ost->frame_rate.den,
ost->frame_rate.num, ost->frame_rate.den, 65535);
}
}
switch (enc_ctx->codec_type) {
case AVMEDIA_TYPE_AUDIO:
enc_ctx->sample_fmt = ost->filter->filter->inputs[0]->format;
enc_ctx->sample_rate = ost->filter->filter->inputs[0]->sample_rate;
enc_ctx->channel_layout = ost->filter->filter->inputs[0]->channel_layout;
enc_ctx->channels = avfilter_link_get_channels(ost->filter->filter->inputs[0]);
enc_ctx->time_base = (AVRational){ 1, enc_ctx->sample_rate };
break;
case AVMEDIA_TYPE_VIDEO:
enc_ctx->time_base = av_inv_q(ost->frame_rate);
if (ost->filter && !(enc_ctx->time_base.num && enc_ctx->time_base.den))
enc_ctx->time_base = ost->filter->filter->inputs[0]->time_base;
if ( av_q2d(enc_ctx->time_base) < 0.001 && video_sync_method != VSYNC_PASSTHROUGH
&& (video_sync_method == VSYNC_CFR || video_sync_method == VSYNC_VSCFR || (video_sync_method == VSYNC_AUTO && !(oc->oformat->flags & AVFMT_VARIABLE_FPS)))){
av_log(oc, AV_LOG_WARNING, "Frame rate very high for a muxer not efficiently supporting it.\n"
"Please consider specifying a lower framerate, a different muxer or -vsync 2\n");
}
for (j = 0; j < ost->forced_kf_count; j++)
ost->forced_kf_pts[j] = av_rescale_q(ost->forced_kf_pts[j],
AV_TIME_BASE_Q,
enc_ctx->time_base);
enc_ctx->width = ost->filter->filter->inputs[0]->w;
enc_ctx->height = ost->filter->filter->inputs[0]->h;
enc_ctx->sample_aspect_ratio = ost->st->sample_aspect_ratio =
ost->frame_aspect_ratio.num ? // overridden by the -aspect cli option
av_mul_q(ost->frame_aspect_ratio, (AVRational){ enc_ctx->height, enc_ctx->width }) :
ost->filter->filter->inputs[0]->sample_aspect_ratio;
if (!strncmp(ost->enc->name, "libx264", 7) &&
enc_ctx->pix_fmt == AV_PIX_FMT_NONE &&
ost->filter->filter->inputs[0]->format != AV_PIX_FMT_YUV420P)
av_log(NULL, AV_LOG_WARNING,
"No pixel format specified, %s for H.264 encoding chosen.\n"
"Use -pix_fmt yuv420p for compatibility with outdated media players.\n",
av_get_pix_fmt_name(ost->filter->filter->inputs[0]->format));
if (!strncmp(ost->enc->name, "mpeg2video", 10) &&
enc_ctx->pix_fmt == AV_PIX_FMT_NONE &&
ost->filter->filter->inputs[0]->format != AV_PIX_FMT_YUV420P)
av_log(NULL, AV_LOG_WARNING,
"No pixel format specified, %s for MPEG-2 encoding chosen.\n"
"Use -pix_fmt yuv420p for compatibility with outdated media players.\n",
av_get_pix_fmt_name(ost->filter->filter->inputs[0]->format));
enc_ctx->pix_fmt = ost->filter->filter->inputs[0]->format;
ost->st->avg_frame_rate = ost->frame_rate;
if (!dec_ctx ||
enc_ctx->width != dec_ctx->width ||
enc_ctx->height != dec_ctx->height ||
enc_ctx->pix_fmt != dec_ctx->pix_fmt) {
enc_ctx->bits_per_raw_sample = frame_bits_per_raw_sample;
}
if (ost->forced_keyframes) {
if (!strncmp(ost->forced_keyframes, "expr:", 5)) {
ret = av_expr_parse(&ost->forced_keyframes_pexpr, ost->forced_keyframes+5,
forced_keyframes_const_names, NULL, NULL, NULL, NULL, 0, NULL);
if (ret < 0) {
av_log(NULL, AV_LOG_ERROR,
"Invalid force_key_frames expression '%s'\n", ost->forced_keyframes+5);
return ret;
}
ost->forced_keyframes_expr_const_values[FKF_N] = 0;
ost->forced_keyframes_expr_const_values[FKF_N_FORCED] = 0;
ost->forced_keyframes_expr_const_values[FKF_PREV_FORCED_N] = NAN;
ost->forced_keyframes_expr_const_values[FKF_PREV_FORCED_T] = NAN;
} else {
parse_forced_key_frames(ost->forced_keyframes, ost, ost->enc_ctx);
}
}
break;
case AVMEDIA_TYPE_SUBTITLE:
enc_ctx->time_base = (AVRational){1, 1000};
if (!enc_ctx->width) {
enc_ctx->width = input_streams[ost->source_index]->st->codec->width;
enc_ctx->height = input_streams[ost->source_index]->st->codec->height;
}
break;
case AVMEDIA_TYPE_DATA:
break;
default:
abort();
break;
}
/* two pass mode */
if (enc_ctx->flags & (CODEC_FLAG_PASS1 | CODEC_FLAG_PASS2)) {
char logfilename[1024];
FILE *f;
snprintf(logfilename, sizeof(logfilename), "%s-%d.log",
ost->logfile_prefix ? ost->logfile_prefix :
DEFAULT_PASS_LOGFILENAME_PREFIX,
i);
if (!strcmp(ost->enc->name, "libx264")) {
av_dict_set(&ost->encoder_opts, "stats", logfilename, AV_DICT_DONT_OVERWRITE);
} else {
if (enc_ctx->flags & CODEC_FLAG_PASS2) {
char *logbuffer;
size_t logbuffer_size;
if (cmdutils_read_file(logfilename, &logbuffer, &logbuffer_size) < 0) {
av_log(NULL, AV_LOG_FATAL, "Error reading log file '%s' for pass-2 encoding\n",
logfilename);
exit_program(1);
}
enc_ctx->stats_in = logbuffer;
}
if (enc_ctx->flags & CODEC_FLAG_PASS1) {
f = av_fopen_utf8(logfilename, "wb");
if (!f) {
av_log(NULL, AV_LOG_FATAL, "Cannot write log file '%s' for pass-1 encoding: %s\n",
logfilename, strerror(errno));
exit_program(1);
}
ost->logfile = f;
}
}
}
}
if (ost->disposition) {
static const AVOption opts[] = {
{ "disposition" , NULL, 0, AV_OPT_TYPE_FLAGS, { .i64 = 0 }, INT64_MIN, INT64_MAX, .unit = "flags" },
{ "default" , NULL, 0, AV_OPT_TYPE_CONST, { .i64 = AV_DISPOSITION_DEFAULT }, .unit = "flags" },
{ "dub" , NULL, 0, AV_OPT_TYPE_CONST, { .i64 = AV_DISPOSITION_DUB }, .unit = "flags" },
{ "original" , NULL, 0, AV_OPT_TYPE_CONST, { .i64 = AV_DISPOSITION_ORIGINAL }, .unit = "flags" },
{ "comment" , NULL, 0, AV_OPT_TYPE_CONST, { .i64 = AV_DISPOSITION_COMMENT }, .unit = "flags" },
{ "lyrics" , NULL, 0, AV_OPT_TYPE_CONST, { .i64 = AV_DISPOSITION_LYRICS }, .unit = "flags" },
{ "karaoke" , NULL, 0, AV_OPT_TYPE_CONST, { .i64 = AV_DISPOSITION_KARAOKE }, .unit = "flags" },
{ "forced" , NULL, 0, AV_OPT_TYPE_CONST, { .i64 = AV_DISPOSITION_FORCED }, .unit = "flags" },
{ "hearing_impaired" , NULL, 0, AV_OPT_TYPE_CONST, { .i64 = AV_DISPOSITION_HEARING_IMPAIRED }, .unit = "flags" },
{ "visual_impaired" , NULL, 0, AV_OPT_TYPE_CONST, { .i64 = AV_DISPOSITION_VISUAL_IMPAIRED }, .unit = "flags" },
{ "clean_effects" , NULL, 0, AV_OPT_TYPE_CONST, { .i64 = AV_DISPOSITION_CLEAN_EFFECTS }, .unit = "flags" },
{ "captions" , NULL, 0, AV_OPT_TYPE_CONST, { .i64 = AV_DISPOSITION_CAPTIONS }, .unit = "flags" },
{ "descriptions" , NULL, 0, AV_OPT_TYPE_CONST, { .i64 = AV_DISPOSITION_DESCRIPTIONS }, .unit = "flags" },
{ "metadata" , NULL, 0, AV_OPT_TYPE_CONST, { .i64 = AV_DISPOSITION_METADATA }, .unit = "flags" },
{ NULL },
};
static const AVClass class = {
.class_name = "",
.item_name = av_default_item_name,
.option = opts,
.version = LIBAVUTIL_VERSION_INT,
};
const AVClass *pclass = &class;
ret = av_opt_eval_flags(&pclass, &opts[0], ost->disposition, &ost->st->disposition);
if (ret < 0)
goto dump_format;
}
}
/* open each encoder */
for (i = 0; i < nb_output_streams; i++) {
ost = output_streams[i];
if (ost->encoding_needed) {
AVCodec *codec = ost->enc;
AVCodecContext *dec = NULL;
if ((ist = get_input_stream(ost)))
dec = ist->dec_ctx;
if (dec && dec->subtitle_header) {
/* ASS code assumes this buffer is null terminated so add extra byte. */
ost->enc_ctx->subtitle_header = av_mallocz(dec->subtitle_header_size + 1);
if (!ost->enc_ctx->subtitle_header) {
ret = AVERROR(ENOMEM);
goto dump_format;
}
memcpy(ost->enc_ctx->subtitle_header, dec->subtitle_header, dec->subtitle_header_size);
ost->enc_ctx->subtitle_header_size = dec->subtitle_header_size;
}
if (!av_dict_get(ost->encoder_opts, "threads", NULL, 0))
av_dict_set(&ost->encoder_opts, "threads", "auto", 0);
av_dict_set(&ost->encoder_opts, "side_data_only_packets", "1", 0);
if ((ret = avcodec_open2(ost->enc_ctx, codec, &ost->encoder_opts)) < 0) {
if (ret == AVERROR_EXPERIMENTAL)
abort_codec_experimental(codec, 1);
snprintf(error, sizeof(error), "Error while opening encoder for output stream #%d:%d - maybe incorrect parameters such as bit_rate, rate, width or height",
ost->file_index, ost->index);
goto dump_format;
}
if (ost->enc->type == AVMEDIA_TYPE_AUDIO &&
!(ost->enc->capabilities & CODEC_CAP_VARIABLE_FRAME_SIZE))
av_buffersink_set_frame_size(ost->filter->filter,
ost->enc_ctx->frame_size);
assert_avoptions(ost->encoder_opts);
if (ost->enc_ctx->bit_rate && ost->enc_ctx->bit_rate < 1000)
av_log(NULL, AV_LOG_WARNING, "The bitrate parameter is set too low."
" It takes bits/s as argument, not kbits/s\n");
ret = avcodec_copy_context(ost->st->codec, ost->enc_ctx);
if (ret < 0) {
av_log(NULL, AV_LOG_FATAL,
"Error initializing the output stream codec context.\n");
exit_program(1);
}
// copy timebase while removing common factors
ost->st->time_base = av_add_q(ost->enc_ctx->time_base, (AVRational){0, 1});
ost->st->codec->codec= ost->enc_ctx->codec;
} else {
ret = av_opt_set_dict(ost->enc_ctx, &ost->encoder_opts);
if (ret < 0) {
av_log(NULL, AV_LOG_FATAL,
"Error setting up codec context options.\n");
return ret;
}
// copy timebase while removing common factors
ost->st->time_base = av_add_q(ost->st->codec->time_base, (AVRational){0, 1});
}
}
/* init input streams */
for (i = 0; i < nb_input_streams; i++)
if ((ret = init_input_stream(i, error, sizeof(error))) < 0) {
for (i = 0; i < nb_output_streams; i++) {
ost = output_streams[i];
avcodec_close(ost->enc_ctx);
}
goto dump_format;
}
/* discard unused programs */
for (i = 0; i < nb_input_files; i++) {
InputFile *ifile = input_files[i];
for (j = 0; j < ifile->ctx->nb_programs; j++) {
AVProgram *p = ifile->ctx->programs[j];
int discard = AVDISCARD_ALL;
for (k = 0; k < p->nb_stream_indexes; k++)
if (!input_streams[ifile->ist_index + p->stream_index[k]]->discard) {
discard = AVDISCARD_DEFAULT;
break;
}
p->discard = discard;
}
}
/* open files and write file headers */
for (i = 0; i < nb_output_files; i++) {
oc = output_files[i]->ctx;
oc->interrupt_callback = int_cb;
if ((ret = avformat_write_header(oc, &output_files[i]->opts)) < 0) {
snprintf(error, sizeof(error),
"Could not write header for output file #%d "
"(incorrect codec parameters ?): %s",
i, av_err2str(ret));
ret = AVERROR(EINVAL);
goto dump_format;
}
// assert_avoptions(output_files[i]->opts);
if (strcmp(oc->oformat->name, "rtp")) {
want_sdp = 0;
}
}
dump_format:
/* dump the file output parameters - cannot be done before in case
of stream copy */
for (i = 0; i < nb_output_files; i++) {
av_dump_format(output_files[i]->ctx, i, output_files[i]->ctx->filename, 1);
}
/* dump the stream mapping */
av_log(NULL, AV_LOG_INFO, "Stream mapping:\n");
for (i = 0; i < nb_input_streams; i++) {
ist = input_streams[i];
for (j = 0; j < ist->nb_filters; j++) {
if (ist->filters[j]->graph->graph_desc) {
av_log(NULL, AV_LOG_INFO, " Stream #%d:%d (%s) -> %s",
ist->file_index, ist->st->index, ist->dec ? ist->dec->name : "?",
ist->filters[j]->name);
if (nb_filtergraphs > 1)
av_log(NULL, AV_LOG_INFO, " (graph %d)", ist->filters[j]->graph->index);
av_log(NULL, AV_LOG_INFO, "\n");
}
}
}
for (i = 0; i < nb_output_streams; i++) {
ost = output_streams[i];
if (ost->attachment_filename) {
/* an attached file */
av_log(NULL, AV_LOG_INFO, " File %s -> Stream #%d:%d\n",
ost->attachment_filename, ost->file_index, ost->index);
continue;
}
if (ost->filter && ost->filter->graph->graph_desc) {
/* output from a complex graph */
av_log(NULL, AV_LOG_INFO, " %s", ost->filter->name);
if (nb_filtergraphs > 1)
av_log(NULL, AV_LOG_INFO, " (graph %d)", ost->filter->graph->index);
av_log(NULL, AV_LOG_INFO, " -> Stream #%d:%d (%s)\n", ost->file_index,
ost->index, ost->enc ? ost->enc->name : "?");
continue;
}
av_log(NULL, AV_LOG_INFO, " Stream #%d:%d -> #%d:%d",
input_streams[ost->source_index]->file_index,
input_streams[ost->source_index]->st->index,
ost->file_index,
ost->index);
if (ost->sync_ist != input_streams[ost->source_index])
av_log(NULL, AV_LOG_INFO, " [sync #%d:%d]",
ost->sync_ist->file_index,
ost->sync_ist->st->index);
if (ost->stream_copy)
av_log(NULL, AV_LOG_INFO, " (copy)");
else {
const AVCodec *in_codec = input_streams[ost->source_index]->dec;
const AVCodec *out_codec = ost->enc;
const char *decoder_name = "?";
const char *in_codec_name = "?";
const char *encoder_name = "?";
const char *out_codec_name = "?";
const AVCodecDescriptor *desc;
if (in_codec) {
decoder_name = in_codec->name;
desc = avcodec_descriptor_get(in_codec->id);
if (desc)
in_codec_name = desc->name;
if (!strcmp(decoder_name, in_codec_name))
decoder_name = "native";
}
if (out_codec) {
encoder_name = out_codec->name;
desc = avcodec_descriptor_get(out_codec->id);
if (desc)
out_codec_name = desc->name;
if (!strcmp(encoder_name, out_codec_name))
encoder_name = "native";
}
av_log(NULL, AV_LOG_INFO, " (%s (%s) -> %s (%s))",
in_codec_name, decoder_name,
out_codec_name, encoder_name);
}
av_log(NULL, AV_LOG_INFO, "\n");
}
if (ret) {
av_log(NULL, AV_LOG_ERROR, "%s\n", error);
return ret;
}
if (sdp_filename || want_sdp) {
print_sdp();
}
transcode_init_done = 1;
return 0;
}
| 5,120 |
FFmpeg | f73e3938ac70524826664855210446c3739c4a5e | 0 | static int mp3_read_packet(AVFormatContext *s, AVPacket *pkt)
{
int ret;
ret = av_get_packet(s->pb, pkt, MP3_PACKET_SIZE);
pkt->stream_index = 0;
if (ret <= 0) {
return AVERROR(EIO);
}
if (ret > ID3v1_TAG_SIZE &&
memcmp(&pkt->data[ret - ID3v1_TAG_SIZE], "TAG", 3) == 0)
ret -= ID3v1_TAG_SIZE;
/* note: we need to modify the packet size here to handle the last
packet */
pkt->size = ret;
return ret;
}
| 5,123 |
FFmpeg | 1d16a1cf99488f16492b1bb48e023f4da8377e07 | 0 | static void ff_h264_idct_add_mmx(uint8_t *dst, int16_t *block, int stride)
{
/* Load dct coeffs */
__asm__ volatile(
"movq (%0), %%mm0 \n\t"
"movq 8(%0), %%mm1 \n\t"
"movq 16(%0), %%mm2 \n\t"
"movq 24(%0), %%mm3 \n\t"
:: "r"(block) );
__asm__ volatile(
/* mm1=s02+s13 mm2=s02-s13 mm4=d02+d13 mm0=d02-d13 */
IDCT4_1D( %%mm2, %%mm1, %%mm0, %%mm3, %%mm4 )
"movq %0, %%mm6 \n\t"
/* in: 1,4,0,2 out: 1,2,3,0 */
TRANSPOSE4( %%mm3, %%mm1, %%mm0, %%mm2, %%mm4 )
"paddw %%mm6, %%mm3 \n\t"
/* mm2=s02+s13 mm3=s02-s13 mm4=d02+d13 mm1=d02-d13 */
IDCT4_1D( %%mm4, %%mm2, %%mm3, %%mm0, %%mm1 )
"pxor %%mm7, %%mm7 \n\t"
:: "m"(ff_pw_32));
__asm__ volatile(
STORE_DIFF_4P( %%mm0, %%mm1, %%mm7)
"add %1, %0 \n\t"
STORE_DIFF_4P( %%mm2, %%mm1, %%mm7)
"add %1, %0 \n\t"
STORE_DIFF_4P( %%mm3, %%mm1, %%mm7)
"add %1, %0 \n\t"
STORE_DIFF_4P( %%mm4, %%mm1, %%mm7)
: "+r"(dst)
: "r" ((x86_reg)stride)
);
}
| 5,124 |
FFmpeg | 973b1a6b9070e2bf17d17568cbaf4043ce931f51 | 0 | static void vdadec_flush(AVCodecContext *avctx)
{
return ff_h264_decoder.flush(avctx);
}
| 5,125 |
qemu | 027d9a7d2911e993cdcbd21c7c35d1dd058f05bb | 1 | static inline tcg_target_ulong cpu_tb_exec(CPUState *cpu, TranslationBlock *itb)
{
CPUArchState *env = cpu->env_ptr;
uintptr_t ret;
TranslationBlock *last_tb;
int tb_exit;
uint8_t *tb_ptr = itb->tc_ptr;
qemu_log_mask_and_addr(CPU_LOG_EXEC, itb->pc,
"Trace %p [" TARGET_FMT_lx "] %s\n",
itb->tc_ptr, itb->pc, lookup_symbol(itb->pc));
#if defined(DEBUG_DISAS)
if (qemu_loglevel_mask(CPU_LOG_TB_CPU)
&& qemu_log_in_addr_range(itb->pc)) {
#if defined(TARGET_I386)
log_cpu_state(cpu, CPU_DUMP_CCOP);
#elif defined(TARGET_M68K)
/* ??? Should not modify env state for dumping. */
cpu_m68k_flush_flags(env, env->cc_op);
env->cc_op = CC_OP_FLAGS;
env->sr = (env->sr & 0xffe0) | env->cc_dest | (env->cc_x << 4);
log_cpu_state(cpu, 0);
#else
log_cpu_state(cpu, 0);
#endif
}
#endif /* DEBUG_DISAS */
cpu->can_do_io = !use_icount;
ret = tcg_qemu_tb_exec(env, tb_ptr);
cpu->can_do_io = 1;
last_tb = (TranslationBlock *)(ret & ~TB_EXIT_MASK);
tb_exit = ret & TB_EXIT_MASK;
trace_exec_tb_exit(last_tb, tb_exit);
if (tb_exit > TB_EXIT_IDX1) {
/* We didn't start executing this TB (eg because the instruction
* counter hit zero); we must restore the guest PC to the address
* of the start of the TB.
*/
CPUClass *cc = CPU_GET_CLASS(cpu);
qemu_log_mask_and_addr(CPU_LOG_EXEC, last_tb->pc,
"Stopped execution of TB chain before %p ["
TARGET_FMT_lx "] %s\n",
last_tb->tc_ptr, last_tb->pc,
lookup_symbol(last_tb->pc));
if (cc->synchronize_from_tb) {
cc->synchronize_from_tb(cpu, last_tb);
} else {
assert(cc->set_pc);
cc->set_pc(cpu, last_tb->pc);
}
}
if (tb_exit == TB_EXIT_REQUESTED) {
/* We were asked to stop executing TBs (probably a pending
* interrupt. We've now stopped, so clear the flag.
*/
cpu->tcg_exit_req = 0;
}
return ret;
}
| 5,126 |
qemu | 12351a91da97b414eec8cdb09f1d9f41e535a401 | 1 | static void ac97_class_init (ObjectClass *klass, void *data)
{
DeviceClass *dc = DEVICE_CLASS (klass);
PCIDeviceClass *k = PCI_DEVICE_CLASS (klass);
k->realize = ac97_realize;
k->vendor_id = PCI_VENDOR_ID_INTEL;
k->device_id = PCI_DEVICE_ID_INTEL_82801AA_5;
k->revision = 0x01;
k->class_id = PCI_CLASS_MULTIMEDIA_AUDIO;
set_bit(DEVICE_CATEGORY_SOUND, dc->categories);
dc->desc = "Intel 82801AA AC97 Audio";
dc->vmsd = &vmstate_ac97;
dc->props = ac97_properties;
dc->reset = ac97_on_reset;
} | 5,127 |
FFmpeg | f19af812a32c1398d48c3550d11dbc6aafbb2bfc | 1 | static int adx_decode_init(AVCodecContext * avctx)
{
ADXContext *c = avctx->priv_data;
// printf("adx_decode_init\n"); fflush(stdout);
c->prev[0].s1 = 0;
c->prev[0].s2 = 0;
c->prev[1].s1 = 0;
c->prev[1].s2 = 0;
c->header_parsed = 0;
c->in_temp = 0;
return 0;
}
| 5,128 |
FFmpeg | c8b835954ae4aef797112afda3b52f8dfe3c7b74 | 1 | static int xan_huffman_decode(unsigned char *dest, int dest_len,
const unsigned char *src, int src_len)
{
unsigned char byte = *src++;
unsigned char ival = byte + 0x16;
const unsigned char * ptr = src + byte*2;
int ptr_len = src_len - 1 - byte*2;
unsigned char val = ival;
unsigned char *dest_end = dest + dest_len;
GetBitContext gb;
if (ptr_len < 0)
return AVERROR_INVALIDDATA;
init_get_bits(&gb, ptr, ptr_len * 8);
while ( val != 0x16 ) {
val = src[val - 0x17 + get_bits1(&gb) * byte];
if ( val < 0x16 ) {
if (dest >= dest_end)
return 0;
*dest++ = val;
val = ival;
}
}
return 0;
}
| 5,130 |
FFmpeg | f907615f0813e8499f06a7eebccf1c63fce87c8e | 1 | static int init(AVCodecParserContext *s)
{
H264Context *h = s->priv_data;
h->thread_context[0] = h;
return 0;
} | 5,131 |
FFmpeg | feaf1a739377f8b9ce65e9d3032f3d828d0c15ed | 1 | static int unpack_vectors(Vp3DecodeContext *s, GetBitContext *gb)
{
int i, j, k;
int coding_mode;
int motion_x[6];
int motion_y[6];
int last_motion_x = 0;
int last_motion_y = 0;
int prior_last_motion_x = 0;
int prior_last_motion_y = 0;
int current_macroblock;
int current_fragment;
debug_vp3(" vp3: unpacking motion vectors\n");
if (s->keyframe) {
debug_vp3(" keyframe-- there are no motion vectors\n");
} else {
memset(motion_x, 0, 6 * sizeof(int));
memset(motion_y, 0, 6 * sizeof(int));
/* coding mode 0 is the VLC scheme; 1 is the fixed code scheme */
coding_mode = get_bits1(gb);
debug_vectors(" using %s scheme for unpacking motion vectors\n",
(coding_mode == 0) ? "VLC" : "fixed-length");
/* iterate through all of the macroblocks that contain 1 or more
* coded fragments */
for (i = 0; i < s->u_superblock_start; i++) {
for (j = 0; j < 4; j++) {
current_macroblock = s->superblock_macroblocks[i * 4 + j];
if ((current_macroblock == -1) ||
(s->macroblock_coding[current_macroblock] == MODE_COPY))
continue;
if (current_macroblock >= s->macroblock_count) {
av_log(s->avctx, AV_LOG_ERROR, " vp3:unpack_vectors(): bad macroblock number (%d >= %d)\n",
current_macroblock, s->macroblock_count);
return 1;
}
current_fragment = s->macroblock_fragments[current_macroblock * 6];
if (current_fragment >= s->fragment_count) {
av_log(s->avctx, AV_LOG_ERROR, " vp3:unpack_vectors(): bad fragment number (%d >= %d\n",
current_fragment, s->fragment_count);
return 1;
}
switch (s->macroblock_coding[current_macroblock]) {
case MODE_INTER_PLUS_MV:
case MODE_GOLDEN_MV:
/* all 6 fragments use the same motion vector */
if (coding_mode == 0) {
motion_x[0] = motion_vector_table[get_vlc2(gb, s->motion_vector_vlc.table, 6, 2)];
motion_y[0] = motion_vector_table[get_vlc2(gb, s->motion_vector_vlc.table, 6, 2)];
} else {
motion_x[0] = fixed_motion_vector_table[get_bits(gb, 6)];
motion_y[0] = fixed_motion_vector_table[get_bits(gb, 6)];
}
for (k = 1; k < 6; k++) {
motion_x[k] = motion_x[0];
motion_y[k] = motion_y[0];
}
/* vector maintenance, only on MODE_INTER_PLUS_MV */
if (s->macroblock_coding[current_macroblock] ==
MODE_INTER_PLUS_MV) {
prior_last_motion_x = last_motion_x;
prior_last_motion_y = last_motion_y;
last_motion_x = motion_x[0];
last_motion_y = motion_y[0];
}
break;
case MODE_INTER_FOURMV:
/* vector maintenance */
prior_last_motion_x = last_motion_x;
prior_last_motion_y = last_motion_y;
/* fetch 4 vectors from the bitstream, one for each
* Y fragment, then average for the C fragment vectors */
motion_x[4] = motion_y[4] = 0;
for (k = 0; k < 4; k++) {
if (coding_mode == 0) {
motion_x[k] = motion_vector_table[get_vlc2(gb, s->motion_vector_vlc.table, 6, 2)];
motion_y[k] = motion_vector_table[get_vlc2(gb, s->motion_vector_vlc.table, 6, 2)];
} else {
motion_x[k] = fixed_motion_vector_table[get_bits(gb, 6)];
motion_y[k] = fixed_motion_vector_table[get_bits(gb, 6)];
}
last_motion_x = motion_x[k];
last_motion_y = motion_y[k];
motion_x[4] += motion_x[k];
motion_y[4] += motion_y[k];
}
motion_x[5]=
motion_x[4]= RSHIFT(motion_x[4], 2);
motion_y[5]=
motion_y[4]= RSHIFT(motion_y[4], 2);
break;
case MODE_INTER_LAST_MV:
/* all 6 fragments use the last motion vector */
motion_x[0] = last_motion_x;
motion_y[0] = last_motion_y;
for (k = 1; k < 6; k++) {
motion_x[k] = motion_x[0];
motion_y[k] = motion_y[0];
}
/* no vector maintenance (last vector remains the
* last vector) */
break;
case MODE_INTER_PRIOR_LAST:
/* all 6 fragments use the motion vector prior to the
* last motion vector */
motion_x[0] = prior_last_motion_x;
motion_y[0] = prior_last_motion_y;
for (k = 1; k < 6; k++) {
motion_x[k] = motion_x[0];
motion_y[k] = motion_y[0];
}
/* vector maintenance */
prior_last_motion_x = last_motion_x;
prior_last_motion_y = last_motion_y;
last_motion_x = motion_x[0];
last_motion_y = motion_y[0];
break;
default:
/* covers intra, inter without MV, golden without MV */
memset(motion_x, 0, 6 * sizeof(int));
memset(motion_y, 0, 6 * sizeof(int));
/* no vector maintenance */
break;
}
/* assign the motion vectors to the correct fragments */
debug_vectors(" vectors for macroblock starting @ fragment %d (coding method %d):\n",
current_fragment,
s->macroblock_coding[current_macroblock]);
for (k = 0; k < 6; k++) {
current_fragment =
s->macroblock_fragments[current_macroblock * 6 + k];
if (current_fragment == -1)
continue;
if (current_fragment >= s->fragment_count) {
av_log(s->avctx, AV_LOG_ERROR, " vp3:unpack_vectors(): bad fragment number (%d >= %d)\n",
current_fragment, s->fragment_count);
return 1;
}
s->all_fragments[current_fragment].motion_x = motion_x[k];
s->all_fragments[current_fragment].motion_y = motion_y[k];
debug_vectors(" vector %d: fragment %d = (%d, %d)\n",
k, current_fragment, motion_x[k], motion_y[k]);
}
}
}
}
return 0;
}
| 5,132 |
FFmpeg | 32be264cea542b4dc721b10092bf1dfe511a28ee | 1 | static void avoid_clipping(AACEncContext *s, SingleChannelElement *sce)
{
int start, i, j, w;
if (sce->ics.clip_avoidance_factor < 1.0f) {
for (w = 0; w < sce->ics.num_windows; w++) {
start = 0;
for (i = 0; i < sce->ics.max_sfb; i++) {
float *swb_coeffs = sce->coeffs + start + w*128;
for (j = 0; j < sce->ics.swb_sizes[i]; j++)
swb_coeffs[j] *= sce->ics.clip_avoidance_factor;
start += sce->ics.swb_sizes[i];
}
}
}
}
| 5,133 |
qemu | 8be487d8f184f2f721cabeac559fb7a6cba18c95 | 1 | static void sdhci_end_transfer(SDHCIState *s)
{
/* Automatically send CMD12 to stop transfer if AutoCMD12 enabled */
if ((s->trnmod & SDHC_TRNS_ACMD12) != 0) {
SDRequest request;
uint8_t response[16];
request.cmd = 0x0C;
request.arg = 0;
DPRINT_L1("Automatically issue CMD%d %08x\n", request.cmd, request.arg);
sdbus_do_command(&s->sdbus, &request, response);
/* Auto CMD12 response goes to the upper Response register */
s->rspreg[3] = (response[0] << 24) | (response[1] << 16) |
(response[2] << 8) | response[3];
}
s->prnsts &= ~(SDHC_DOING_READ | SDHC_DOING_WRITE |
SDHC_DAT_LINE_ACTIVE | SDHC_DATA_INHIBIT |
SDHC_SPACE_AVAILABLE | SDHC_DATA_AVAILABLE);
if (s->norintstsen & SDHC_NISEN_TRSCMP) {
s->norintsts |= SDHC_NIS_TRSCMP;
}
sdhci_update_irq(s);
}
| 5,134 |
FFmpeg | a237b38021cd3009cc78eeb974b596085f2fe393 | 1 | int ff_ps_read_data(AVCodecContext *avctx, GetBitContext *gb_host, PSContext *ps, int bits_left)
{
int e;
int bit_count_start = get_bits_count(gb_host);
int header;
int bits_consumed;
GetBitContext gbc = *gb_host, *gb = &gbc;
header = get_bits1(gb);
if (header) { //enable_ps_header
ps->enable_iid = get_bits1(gb);
if (ps->enable_iid) {
int iid_mode = get_bits(gb, 3);
if (iid_mode > 5) {
av_log(avctx, AV_LOG_ERROR, "iid_mode %d is reserved.\n",
iid_mode);
goto err;
}
ps->nr_iid_par = nr_iidicc_par_tab[iid_mode];
ps->iid_quant = iid_mode > 2;
ps->nr_ipdopd_par = nr_iidopd_par_tab[iid_mode];
}
ps->enable_icc = get_bits1(gb);
if (ps->enable_icc) {
ps->icc_mode = get_bits(gb, 3);
if (ps->icc_mode > 5) {
av_log(avctx, AV_LOG_ERROR, "icc_mode %d is reserved.\n",
ps->icc_mode);
goto err;
}
ps->nr_icc_par = nr_iidicc_par_tab[ps->icc_mode];
}
ps->enable_ext = get_bits1(gb);
}
ps->frame_class = get_bits1(gb);
ps->num_env_old = ps->num_env;
ps->num_env = num_env_tab[ps->frame_class][get_bits(gb, 2)];
ps->border_position[0] = -1;
if (ps->frame_class) {
for (e = 1; e <= ps->num_env; e++)
ps->border_position[e] = get_bits(gb, 5);
} else
for (e = 1; e <= ps->num_env; e++)
ps->border_position[e] = (e * numQMFSlots >> ff_log2_tab[ps->num_env]) - 1;
if (ps->enable_iid) {
for (e = 0; e < ps->num_env; e++) {
int dt = get_bits1(gb);
if (read_iid_data(avctx, gb, ps, ps->iid_par, huff_iid[2*dt+ps->iid_quant], e, dt))
goto err;
}
} else
if (ps->enable_icc)
for (e = 0; e < ps->num_env; e++) {
int dt = get_bits1(gb);
if (read_icc_data(avctx, gb, ps, ps->icc_par, dt ? huff_icc_dt : huff_icc_df, e, dt))
goto err;
}
else
if (ps->enable_ext) {
int cnt = get_bits(gb, 4);
if (cnt == 15) {
cnt += get_bits(gb, 8);
}
cnt *= 8;
while (cnt > 7) {
int ps_extension_id = get_bits(gb, 2);
cnt -= 2 + ps_read_extension_data(gb, ps, ps_extension_id);
}
if (cnt < 0) {
av_log(avctx, AV_LOG_ERROR, "ps extension overflow %d\n", cnt);
goto err;
}
skip_bits(gb, cnt);
}
ps->enable_ipdopd &= !PS_BASELINE;
//Fix up envelopes
if (!ps->num_env || ps->border_position[ps->num_env] < numQMFSlots - 1) {
//Create a fake envelope
int source = ps->num_env ? ps->num_env - 1 : ps->num_env_old - 1;
if (source >= 0 && source != ps->num_env) {
if (ps->enable_iid) {
memcpy(ps->iid_par+ps->num_env, ps->iid_par+source, sizeof(ps->iid_par[0]));
}
if (ps->enable_icc) {
memcpy(ps->icc_par+ps->num_env, ps->icc_par+source, sizeof(ps->icc_par[0]));
}
if (ps->enable_ipdopd) {
memcpy(ps->ipd_par+ps->num_env, ps->ipd_par+source, sizeof(ps->ipd_par[0]));
memcpy(ps->opd_par+ps->num_env, ps->opd_par+source, sizeof(ps->opd_par[0]));
}
}
ps->num_env++;
ps->border_position[ps->num_env] = numQMFSlots - 1;
}
ps->is34bands_old = ps->is34bands;
if (!PS_BASELINE && (ps->enable_iid || ps->enable_icc))
ps->is34bands = (ps->enable_iid && ps->nr_iid_par == 34) ||
(ps->enable_icc && ps->nr_icc_par == 34);
//Baseline
if (!ps->enable_ipdopd) {
}
if (header)
ps->start = 1;
bits_consumed = get_bits_count(gb) - bit_count_start;
if (bits_consumed <= bits_left) {
skip_bits_long(gb_host, bits_consumed);
return bits_consumed;
}
av_log(avctx, AV_LOG_ERROR, "Expected to read %d PS bits actually read %d.\n", bits_left, bits_consumed);
err:
ps->start = 0;
skip_bits_long(gb_host, bits_left);
return bits_left;
} | 5,135 |
FFmpeg | d0a882ab1d2a4197da1edd77450af30e2da3460e | 1 | ff_vorbis_comment(AVFormatContext * as, AVDictionary **m, const uint8_t *buf, int size)
{
const uint8_t *p = buf;
const uint8_t *end = buf + size;
unsigned n, j;
int s;
if (size < 8) /* must have vendor_length and user_comment_list_length */
return -1;
s = bytestream_get_le32(&p);
if (end - p - 4 < s || s < 0)
return -1;
p += s;
n = bytestream_get_le32(&p);
while (end - p >= 4 && n > 0) {
const char *t, *v;
int tl, vl;
s = bytestream_get_le32(&p);
if (end - p < s || s < 0)
break;
t = p;
p += s;
n--;
v = memchr(t, '=', s);
if (!v)
continue;
tl = v - t;
vl = s - tl - 1;
v++;
if (tl && vl) {
char *tt, *ct;
tt = av_malloc(tl + 1);
ct = av_malloc(vl + 1);
if (!tt || !ct) {
av_freep(&tt);
av_log(as, AV_LOG_WARNING, "out-of-memory error. skipping VorbisComment tag.\n");
continue;
}
for (j = 0; j < tl; j++)
tt[j] = av_toupper(t[j]);
tt[tl] = 0;
memcpy(ct, v, vl);
ct[vl] = 0;
if (!strcmp(tt, "METADATA_BLOCK_PICTURE")) {
int ret;
char *pict = av_malloc(vl);
if (!pict) {
av_log(as, AV_LOG_WARNING, "out-of-memory error. Skipping cover art block.\n");
continue;
}
if ((ret = av_base64_decode(pict, ct, vl)) > 0)
ret = ff_flac_parse_picture(as, pict, ret);
av_freep(&pict);
if (ret < 0) {
av_log(as, AV_LOG_WARNING, "Failed to parse cover art block.\n");
continue;
}
} else if (!ogm_chapter(as, tt, ct))
av_dict_set(m, tt, ct,
AV_DICT_DONT_STRDUP_KEY |
AV_DICT_DONT_STRDUP_VAL);
}
}
if (p != end)
av_log(as, AV_LOG_INFO, "%ti bytes of comment header remain\n", end-p);
if (n > 0)
av_log(as, AV_LOG_INFO,
"truncated comment header, %i comments not found\n", n);
ff_metadata_conv(m, NULL, ff_vorbiscomment_metadata_conv);
return 0;
} | 5,136 |
qemu | 9dc44aa5829eb3131a01378a738dee28a382bbc1 | 1 | static void *do_touch_pages(void *arg)
{
MemsetThread *memset_args = (MemsetThread *)arg;
char *addr = memset_args->addr;
uint64_t numpages = memset_args->numpages;
uint64_t hpagesize = memset_args->hpagesize;
sigset_t set, oldset;
int i = 0;
/* unblock SIGBUS */
sigemptyset(&set);
sigaddset(&set, SIGBUS);
pthread_sigmask(SIG_UNBLOCK, &set, &oldset);
if (sigsetjmp(memset_args->env, 1)) {
memset_thread_failed = true;
} else {
for (i = 0; i < numpages; i++) {
memset(addr, 0, 1);
addr += hpagesize;
}
}
pthread_sigmask(SIG_SETMASK, &oldset, NULL);
return NULL;
}
| 5,139 |
FFmpeg | 8b58f6b5c286ba14ec9c6c9e03f9ede8c6be9181 | 0 | static int raw_decode(AVCodecContext *avctx,
void *data, int *data_size,
AVPacket *avpkt)
{
const uint8_t *buf = avpkt->data;
int buf_size = avpkt->size;
int linesize_align = 4;
RawVideoContext *context = avctx->priv_data;
AVFrame *frame = data;
AVPicture *picture = data;
frame->pict_type = avctx->coded_frame->pict_type;
frame->interlaced_frame = avctx->coded_frame->interlaced_frame;
frame->top_field_first = avctx->coded_frame->top_field_first;
frame->reordered_opaque = avctx->reordered_opaque;
frame->pkt_pts = avctx->pkt->pts;
frame->pkt_pos = avctx->pkt->pos;
if(context->tff>=0){
frame->interlaced_frame = 1;
frame->top_field_first = context->tff;
}
if(buf_size < context->length - (avctx->pix_fmt==PIX_FMT_PAL8 ? 256*4 : 0))
return -1;
if (avctx->width <= 0 || avctx->height <= 0) {
av_log(avctx, AV_LOG_ERROR, "w/h is invalid\n");
return AVERROR(EINVAL);
}
//2bpp and 4bpp raw in avi and mov (yes this is ugly ...)
if (context->buffer) {
int i;
uint8_t *dst = context->buffer;
buf_size = context->length - 256*4;
if (avctx->bits_per_coded_sample == 4){
for(i=0; 2*i+1 < buf_size && i<avpkt->size; i++){
dst[2*i+0]= buf[i]>>4;
dst[2*i+1]= buf[i]&15;
}
linesize_align = 8;
} else {
for(i=0; 4*i+3 < buf_size && i<avpkt->size; i++){
dst[4*i+0]= buf[i]>>6;
dst[4*i+1]= buf[i]>>4&3;
dst[4*i+2]= buf[i]>>2&3;
dst[4*i+3]= buf[i] &3;
}
linesize_align = 16;
}
buf= dst;
}
if(avctx->codec_tag == MKTAG('A', 'V', '1', 'x') ||
avctx->codec_tag == MKTAG('A', 'V', 'u', 'p'))
buf += buf_size - context->length;
avpicture_fill(picture, buf, avctx->pix_fmt, avctx->width, avctx->height);
if((avctx->pix_fmt==PIX_FMT_PAL8 && buf_size < context->length) ||
(av_pix_fmt_descriptors[avctx->pix_fmt].flags & PIX_FMT_PSEUDOPAL)) {
frame->data[1]= context->palette;
}
if (avctx->pix_fmt == PIX_FMT_PAL8) {
const uint8_t *pal = av_packet_get_side_data(avpkt, AV_PKT_DATA_PALETTE, NULL);
if (pal) {
memcpy(frame->data[1], pal, AVPALETTE_SIZE);
frame->palette_has_changed = 1;
}
}
if((avctx->pix_fmt==PIX_FMT_BGR24 ||
avctx->pix_fmt==PIX_FMT_GRAY8 ||
avctx->pix_fmt==PIX_FMT_RGB555LE ||
avctx->pix_fmt==PIX_FMT_RGB555BE ||
avctx->pix_fmt==PIX_FMT_RGB565LE ||
avctx->pix_fmt==PIX_FMT_MONOWHITE ||
avctx->pix_fmt==PIX_FMT_PAL8) &&
FFALIGN(frame->linesize[0], linesize_align)*avctx->height <= buf_size)
frame->linesize[0] = FFALIGN(frame->linesize[0], linesize_align);
if(context->flip)
flip(avctx, picture);
if ( avctx->codec_tag == MKTAG('Y', 'V', '1', '2')
|| avctx->codec_tag == MKTAG('Y', 'V', '1', '6')
|| avctx->codec_tag == MKTAG('Y', 'V', '2', '4')
|| avctx->codec_tag == MKTAG('Y', 'V', 'U', '9'))
FFSWAP(uint8_t *, picture->data[1], picture->data[2]);
if(avctx->codec_tag == AV_RL32("yuv2") &&
avctx->pix_fmt == PIX_FMT_YUYV422) {
int x, y;
uint8_t *line = picture->data[0];
for(y = 0; y < avctx->height; y++) {
for(x = 0; x < avctx->width; x++)
line[2*x + 1] ^= 0x80;
line += picture->linesize[0];
}
}
*data_size = sizeof(AVPicture);
return buf_size;
}
| 5,140 |
FFmpeg | 5331773cc33ba26b9e26ace643d926219e46a17b | 0 | static int oma_read_header(AVFormatContext *s)
{
int ret, framesize, jsflag, samplerate;
uint32_t codec_params, channel_id;
int16_t eid;
uint8_t buf[EA3_HEADER_SIZE];
uint8_t *edata;
AVStream *st;
ID3v2ExtraMeta *extra_meta = NULL;
OMAContext *oc = s->priv_data;
ff_id3v2_read(s, ID3v2_EA3_MAGIC, &extra_meta);
ret = avio_read(s->pb, buf, EA3_HEADER_SIZE);
if (ret < EA3_HEADER_SIZE)
return -1;
if (memcmp(buf, ((const uint8_t[]){'E', 'A', '3'}), 3) ||
buf[4] != 0 || buf[5] != EA3_HEADER_SIZE) {
av_log(s, AV_LOG_ERROR, "Couldn't find the EA3 header !\n");
return AVERROR_INVALIDDATA;
}
oc->content_start = avio_tell(s->pb);
/* encrypted file */
eid = AV_RB16(&buf[6]);
if (eid != -1 && eid != -128 && decrypt_init(s, extra_meta, buf) < 0) {
ff_id3v2_free_extra_meta(&extra_meta);
return -1;
}
ff_id3v2_free_extra_meta(&extra_meta);
codec_params = AV_RB24(&buf[33]);
st = avformat_new_stream(s, NULL);
if (!st)
return AVERROR(ENOMEM);
st->start_time = 0;
st->codec->codec_type = AVMEDIA_TYPE_AUDIO;
st->codec->codec_tag = buf[32];
st->codec->codec_id = ff_codec_get_id(ff_oma_codec_tags,
st->codec->codec_tag);
switch (buf[32]) {
case OMA_CODECID_ATRAC3:
samplerate = ff_oma_srate_tab[(codec_params >> 13) & 7] * 100;
if (!samplerate) {
av_log(s, AV_LOG_ERROR, "Unsupported sample rate\n");
return AVERROR_INVALIDDATA;
}
if (samplerate != 44100)
avpriv_request_sample(s, "Sample rate %d", samplerate);
framesize = (codec_params & 0x3FF) * 8;
/* get stereo coding mode, 1 for joint-stereo */
jsflag = (codec_params >> 17) & 1;
st->codec->channels = 2;
st->codec->channel_layout = AV_CH_LAYOUT_STEREO;
st->codec->sample_rate = samplerate;
st->codec->bit_rate = st->codec->sample_rate * framesize * 8 / 1024;
/* fake the ATRAC3 extradata
* (wav format, makes stream copy to wav work) */
if (ff_alloc_extradata(st->codec, 14))
return AVERROR(ENOMEM);
edata = st->codec->extradata;
AV_WL16(&edata[0], 1); // always 1
AV_WL32(&edata[2], samplerate); // samples rate
AV_WL16(&edata[6], jsflag); // coding mode
AV_WL16(&edata[8], jsflag); // coding mode
AV_WL16(&edata[10], 1); // always 1
// AV_WL16(&edata[12], 0); // always 0
avpriv_set_pts_info(st, 64, 1, st->codec->sample_rate);
break;
case OMA_CODECID_ATRAC3P:
channel_id = (codec_params >> 10) & 7;
if (!channel_id) {
av_log(s, AV_LOG_ERROR,
"Invalid ATRAC-X channel id: %"PRIu32"\n", channel_id);
return AVERROR_INVALIDDATA;
}
st->codec->channel_layout = ff_oma_chid_to_native_layout[channel_id - 1];
st->codec->channels = ff_oma_chid_to_num_channels[channel_id - 1];
framesize = ((codec_params & 0x3FF) * 8) + 8;
samplerate = ff_oma_srate_tab[(codec_params >> 13) & 7] * 100;
if (!samplerate) {
av_log(s, AV_LOG_ERROR, "Unsupported sample rate\n");
return AVERROR_INVALIDDATA;
}
st->codec->sample_rate = samplerate;
st->codec->bit_rate = samplerate * framesize * 8 / 2048;
avpriv_set_pts_info(st, 64, 1, samplerate);
break;
case OMA_CODECID_MP3:
st->need_parsing = AVSTREAM_PARSE_FULL_RAW;
framesize = 1024;
break;
case OMA_CODECID_LPCM:
/* PCM 44.1 kHz 16 bit stereo big-endian */
st->codec->channels = 2;
st->codec->channel_layout = AV_CH_LAYOUT_STEREO;
st->codec->sample_rate = 44100;
framesize = 1024;
/* bit rate = sample rate x PCM block align (= 4) x 8 */
st->codec->bit_rate = st->codec->sample_rate * 32;
st->codec->bits_per_coded_sample =
av_get_bits_per_sample(st->codec->codec_id);
avpriv_set_pts_info(st, 64, 1, st->codec->sample_rate);
break;
default:
av_log(s, AV_LOG_ERROR, "Unsupported codec %d!\n", buf[32]);
return AVERROR(ENOSYS);
}
st->codec->block_align = framesize;
return 0;
}
| 5,141 |
FFmpeg | 70d54392f5015b9c6594fcae558f59f952501e3b | 0 | void avcodec_align_dimensions2(AVCodecContext *s, int *width, int *height,
int linesize_align[AV_NUM_DATA_POINTERS])
{
int i;
int w_align= 1;
int h_align= 1;
switch(s->pix_fmt){
case PIX_FMT_YUV420P:
case PIX_FMT_YUYV422:
case PIX_FMT_UYVY422:
case PIX_FMT_YUV422P:
case PIX_FMT_YUV440P:
case PIX_FMT_YUV444P:
case PIX_FMT_GBRP:
case PIX_FMT_GRAY8:
case PIX_FMT_GRAY16BE:
case PIX_FMT_GRAY16LE:
case PIX_FMT_YUVJ420P:
case PIX_FMT_YUVJ422P:
case PIX_FMT_YUVJ440P:
case PIX_FMT_YUVJ444P:
case PIX_FMT_YUVA420P:
case PIX_FMT_YUVA444P:
case PIX_FMT_YUV420P9LE:
case PIX_FMT_YUV420P9BE:
case PIX_FMT_YUV420P10LE:
case PIX_FMT_YUV420P10BE:
case PIX_FMT_YUV422P9LE:
case PIX_FMT_YUV422P9BE:
case PIX_FMT_YUV422P10LE:
case PIX_FMT_YUV422P10BE:
case PIX_FMT_YUV444P9LE:
case PIX_FMT_YUV444P9BE:
case PIX_FMT_YUV444P10LE:
case PIX_FMT_YUV444P10BE:
case PIX_FMT_GBRP9LE:
case PIX_FMT_GBRP9BE:
case PIX_FMT_GBRP10LE:
case PIX_FMT_GBRP10BE:
w_align = 16; //FIXME assume 16 pixel per macroblock
h_align = 16 * 2; // interlaced needs 2 macroblocks height
break;
case PIX_FMT_YUV411P:
case PIX_FMT_UYYVYY411:
w_align=32;
h_align=8;
break;
case PIX_FMT_YUV410P:
if(s->codec_id == CODEC_ID_SVQ1){
w_align=64;
h_align=64;
}
case PIX_FMT_RGB555:
if(s->codec_id == CODEC_ID_RPZA){
w_align=4;
h_align=4;
}
case PIX_FMT_PAL8:
case PIX_FMT_BGR8:
case PIX_FMT_RGB8:
if(s->codec_id == CODEC_ID_SMC){
w_align=4;
h_align=4;
}
break;
case PIX_FMT_BGR24:
if((s->codec_id == CODEC_ID_MSZH) || (s->codec_id == CODEC_ID_ZLIB)){
w_align=4;
h_align=4;
}
break;
default:
w_align= 1;
h_align= 1;
break;
}
if(s->codec_id == CODEC_ID_IFF_ILBM || s->codec_id == CODEC_ID_IFF_BYTERUN1){
w_align= FFMAX(w_align, 8);
}
*width = FFALIGN(*width , w_align);
*height= FFALIGN(*height, h_align);
if (s->codec_id == CODEC_ID_H264)
*height+=2; // some of the optimized chroma MC reads one line too much
for (i = 0; i < 4; i++)
linesize_align[i] = STRIDE_ALIGN;
}
| 5,142 |
FFmpeg | 39185ec4faa9ef33954dbf2394444e045b632673 | 0 | static int decode_mb_i(AVSContext *h, int cbp_code)
{
GetBitContext *gb = &h->gb;
unsigned pred_mode_uv;
int block;
uint8_t top[18];
uint8_t *left = NULL;
uint8_t *d;
ff_cavs_init_mb(h);
/* get intra prediction modes from stream */
for (block = 0; block < 4; block++) {
int nA, nB, predpred;
int pos = scan3x3[block];
nA = h->pred_mode_Y[pos - 1];
nB = h->pred_mode_Y[pos - 3];
predpred = FFMIN(nA, nB);
if (predpred == NOT_AVAIL) // if either is not available
predpred = INTRA_L_LP;
if (!get_bits1(gb)) {
int rem_mode = get_bits(gb, 2);
predpred = rem_mode + (rem_mode >= predpred);
}
h->pred_mode_Y[pos] = predpred;
}
pred_mode_uv = get_ue_golomb(gb);
if (pred_mode_uv > 6) {
av_log(h->avctx, AV_LOG_ERROR, "illegal intra chroma pred mode\n");
return AVERROR_INVALIDDATA;
}
ff_cavs_modify_mb_i(h, &pred_mode_uv);
/* get coded block pattern */
if (h->cur.f->pict_type == AV_PICTURE_TYPE_I)
cbp_code = get_ue_golomb(gb);
if (cbp_code > 63) {
av_log(h->avctx, AV_LOG_ERROR, "illegal intra cbp\n");
return AVERROR_INVALIDDATA;
}
h->cbp = cbp_tab[cbp_code][0];
if (h->cbp && !h->qp_fixed)
h->qp = (h->qp + get_se_golomb(gb)) & 63; //qp_delta
/* luma intra prediction interleaved with residual decode/transform/add */
for (block = 0; block < 4; block++) {
d = h->cy + h->luma_scan[block];
ff_cavs_load_intra_pred_luma(h, top, &left, block);
h->intra_pred_l[h->pred_mode_Y[scan3x3[block]]]
(d, top, left, h->l_stride);
if (h->cbp & (1<<block))
decode_residual_block(h, gb, intra_dec, 1, h->qp, d, h->l_stride);
}
/* chroma intra prediction */
ff_cavs_load_intra_pred_chroma(h);
h->intra_pred_c[pred_mode_uv](h->cu, &h->top_border_u[h->mbx * 10],
h->left_border_u, h->c_stride);
h->intra_pred_c[pred_mode_uv](h->cv, &h->top_border_v[h->mbx * 10],
h->left_border_v, h->c_stride);
decode_residual_chroma(h);
ff_cavs_filter(h, I_8X8);
set_mv_intra(h);
return 0;
}
| 5,143 |
FFmpeg | 3bd1162a52cafc1bb758b25636e857c94fd3999c | 1 | static int gif_read_image(GifState *s)
{
int left, top, width, height, bits_per_pixel, code_size, flags;
int is_interleaved, has_local_palette, y, pass, y1, linesize, n, i;
uint8_t *ptr, *spal, *palette, *ptr1;
left = bytestream_get_le16(&s->bytestream);
top = bytestream_get_le16(&s->bytestream);
width = bytestream_get_le16(&s->bytestream);
height = bytestream_get_le16(&s->bytestream);
flags = bytestream_get_byte(&s->bytestream);
is_interleaved = flags & 0x40;
has_local_palette = flags & 0x80;
bits_per_pixel = (flags & 0x07) + 1;
av_dlog(s->avctx, "gif: image x=%d y=%d w=%d h=%d\n", left, top, width, height);
if (has_local_palette) {
bytestream_get_buffer(&s->bytestream, s->local_palette, 3 * (1 << bits_per_pixel));
palette = s->local_palette;
} else {
palette = s->global_palette;
bits_per_pixel = s->bits_per_pixel;
}
/* verify that all the image is inside the screen dimensions */
if (left + width > s->screen_width ||
top + height > s->screen_height)
return AVERROR(EINVAL);
/* build the palette */
n = (1 << bits_per_pixel);
spal = palette;
for(i = 0; i < n; i++) {
s->image_palette[i] = (0xff << 24) | AV_RB24(spal);
spal += 3;
}
for(; i < 256; i++)
s->image_palette[i] = (0xff << 24);
/* handle transparency */
if (s->transparent_color_index >= 0)
s->image_palette[s->transparent_color_index] = 0;
/* now get the image data */
code_size = bytestream_get_byte(&s->bytestream);
ff_lzw_decode_init(s->lzw, code_size, s->bytestream,
s->bytestream_end - s->bytestream, FF_LZW_GIF);
/* read all the image */
linesize = s->picture.linesize[0];
ptr1 = s->picture.data[0] + top * linesize + left;
ptr = ptr1;
pass = 0;
y1 = 0;
for (y = 0; y < height; y++) {
ff_lzw_decode(s->lzw, ptr, width);
if (is_interleaved) {
switch(pass) {
default:
case 0:
case 1:
y1 += 8;
ptr += linesize * 8;
if (y1 >= height) {
y1 = pass ? 2 : 4;
ptr = ptr1 + linesize * y1;
pass++;
}
break;
case 2:
y1 += 4;
ptr += linesize * 4;
if (y1 >= height) {
y1 = 1;
ptr = ptr1 + linesize;
pass++;
}
break;
case 3:
y1 += 2;
ptr += linesize * 2;
break;
}
} else {
ptr += linesize;
}
}
/* read the garbage data until end marker is found */
ff_lzw_decode_tail(s->lzw);
s->bytestream = ff_lzw_cur_ptr(s->lzw);
return 0;
}
| 5,144 |
FFmpeg | 20e8be0c20c7b51964fa4d317073bd36b983eb55 | 1 | static int mkv_write_tags(AVFormatContext *s)
{
MatroskaMuxContext *mkv = s->priv_data;
int i, ret;
ff_metadata_conv_ctx(s, ff_mkv_metadata_conv, NULL);
if (mkv_check_tag(s->metadata, 0)) {
ret = mkv_write_tag(s, s->metadata, 0, 0, &mkv->tags);
if (ret < 0) return ret;
}
for (i = 0; i < s->nb_streams; i++) {
AVStream *st = s->streams[i];
if (st->codecpar->codec_type == AVMEDIA_TYPE_ATTACHMENT)
continue;
if (!mkv_check_tag(st->metadata, MATROSKA_ID_TAGTARGETS_TRACKUID))
continue;
ret = mkv_write_tag(s, st->metadata, MATROSKA_ID_TAGTARGETS_TRACKUID, i + 1, &mkv->tags);
if (ret < 0) return ret;
}
if (s->pb->seekable && !mkv->is_live) {
for (i = 0; i < s->nb_streams; i++) {
AVIOContext *pb;
AVStream *st = s->streams[i];
ebml_master tag_target;
ebml_master tag;
if (st->codecpar->codec_type == AVMEDIA_TYPE_ATTACHMENT)
continue;
mkv_write_tag_targets(s, MATROSKA_ID_TAGTARGETS_TRACKUID, i + 1, &mkv->tags, &tag_target);
pb = mkv->tags_bc;
tag = start_ebml_master(pb, MATROSKA_ID_SIMPLETAG, 0);
put_ebml_string(pb, MATROSKA_ID_TAGNAME, "DURATION");
mkv->stream_duration_offsets[i] = avio_tell(pb);
// Reserve space to write duration as a 20-byte string.
// 2 (ebml id) + 1 (data size) + 20 (data)
put_ebml_void(pb, 23);
end_ebml_master(pb, tag);
end_ebml_master(pb, tag_target);
}
}
for (i = 0; i < s->nb_chapters; i++) {
AVChapter *ch = s->chapters[i];
if (!mkv_check_tag(ch->metadata, MATROSKA_ID_TAGTARGETS_CHAPTERUID))
continue;
ret = mkv_write_tag(s, ch->metadata, MATROSKA_ID_TAGTARGETS_CHAPTERUID, ch->id + mkv->chapter_id_offset, &mkv->tags);
if (ret < 0) return ret;
}
if (mkv->have_attachments) {
for (i = 0; i < mkv->attachments->num_entries; i++) {
mkv_attachment *attachment = &mkv->attachments->entries[i];
AVStream *st = s->streams[attachment->stream_idx];
if (!mkv_check_tag(st->metadata, MATROSKA_ID_TAGTARGETS_ATTACHUID))
continue;
ret = mkv_write_tag(s, st->metadata, MATROSKA_ID_TAGTARGETS_ATTACHUID, attachment->fileuid, &mkv->tags);
if (ret < 0)
return ret;
}
}
if (mkv->tags.pos) {
if (s->pb->seekable && !mkv->is_live)
put_ebml_void(s->pb, avio_tell(mkv->tags_bc));
else
end_ebml_master_crc32(s->pb, &mkv->tags_bc, mkv, mkv->tags);
}
return 0;
}
| 5,145 |
FFmpeg | 57ae94a3c0fced20464d9ae351efc977d964be38 | 1 | static int hls_delete_old_segments(HLSContext *hls) {
HLSSegment *segment, *previous_segment = NULL;
float playlist_duration = 0.0f;
int ret = 0, path_size, sub_path_size;
char *dirname = NULL, *p, *sub_path;
char *path = NULL;
segment = hls->segments;
while (segment) {
playlist_duration += segment->duration;
segment = segment->next;
}
segment = hls->old_segments;
while (segment) {
playlist_duration -= segment->duration;
previous_segment = segment;
segment = previous_segment->next;
if (playlist_duration <= -previous_segment->duration) {
previous_segment->next = NULL;
break;
}
}
if (segment && !hls->use_localtime_mkdir) {
if (hls->segment_filename) {
dirname = av_strdup(hls->segment_filename);
} else {
dirname = av_strdup(hls->avf->filename);
}
if (!dirname) {
ret = AVERROR(ENOMEM);
goto fail;
}
p = (char *)av_basename(dirname);
*p = '\0';
}
while (segment) {
av_log(hls, AV_LOG_DEBUG, "deleting old segment %s\n",
segment->filename);
path_size = (hls->use_localtime_mkdir ? 0 : strlen(dirname)) + strlen(segment->filename) + 1;
path = av_malloc(path_size);
if (!path) {
ret = AVERROR(ENOMEM);
goto fail;
}
if (hls->use_localtime_mkdir)
av_strlcpy(path, segment->filename, path_size);
else { // segment->filename contains basename only
av_strlcpy(path, dirname, path_size);
av_strlcat(path, segment->filename, path_size);
}
if (unlink(path) < 0) {
av_log(hls, AV_LOG_ERROR, "failed to delete old segment %s: %s\n",
path, strerror(errno));
}
if (segment->sub_filename[0] != '\0') {
sub_path_size = strlen(dirname) + strlen(segment->sub_filename) + 1;
sub_path = av_malloc(sub_path_size);
if (!sub_path) {
ret = AVERROR(ENOMEM);
goto fail;
}
av_strlcpy(sub_path, dirname, sub_path_size);
av_strlcat(sub_path, segment->sub_filename, sub_path_size);
if (unlink(sub_path) < 0) {
av_log(hls, AV_LOG_ERROR, "failed to delete old segment %s: %s\n",
sub_path, strerror(errno));
}
av_free(sub_path);
}
av_freep(&path);
previous_segment = segment;
segment = previous_segment->next;
av_free(previous_segment);
}
fail:
av_free(path);
av_free(dirname);
return ret;
}
| 5,147 |
qemu | 56439e9d55626b65ecb887f1ac3714652555312e | 1 | qcow_co_pwritev_compressed(BlockDriverState *bs, uint64_t offset,
uint64_t bytes, QEMUIOVector *qiov)
{
BDRVQcowState *s = bs->opaque;
QEMUIOVector hd_qiov;
struct iovec iov;
z_stream strm;
int ret, out_len;
uint8_t *buf, *out_buf;
uint64_t cluster_offset;
buf = qemu_blockalign(bs, s->cluster_size);
if (bytes != s->cluster_size) {
if (bytes > s->cluster_size ||
offset + bytes != bs->total_sectors << BDRV_SECTOR_BITS)
{
qemu_vfree(buf);
return -EINVAL;
}
/* Zero-pad last write if image size is not cluster aligned */
memset(buf + bytes, 0, s->cluster_size - bytes);
}
qemu_iovec_to_buf(qiov, 0, buf, qiov->size);
out_buf = g_malloc(s->cluster_size);
/* best compression, small window, no zlib header */
memset(&strm, 0, sizeof(strm));
ret = deflateInit2(&strm, Z_DEFAULT_COMPRESSION,
Z_DEFLATED, -12,
9, Z_DEFAULT_STRATEGY);
if (ret != 0) {
ret = -EINVAL;
goto fail;
}
strm.avail_in = s->cluster_size;
strm.next_in = (uint8_t *)buf;
strm.avail_out = s->cluster_size;
strm.next_out = out_buf;
ret = deflate(&strm, Z_FINISH);
if (ret != Z_STREAM_END && ret != Z_OK) {
deflateEnd(&strm);
ret = -EINVAL;
goto fail;
}
out_len = strm.next_out - out_buf;
deflateEnd(&strm);
if (ret != Z_STREAM_END || out_len >= s->cluster_size) {
/* could not compress: write normal cluster */
ret = qcow_co_writev(bs, offset >> BDRV_SECTOR_BITS,
bytes >> BDRV_SECTOR_BITS, qiov);
if (ret < 0) {
goto fail;
}
goto success;
}
qemu_co_mutex_lock(&s->lock);
cluster_offset = get_cluster_offset(bs, offset, 2, out_len, 0, 0);
qemu_co_mutex_unlock(&s->lock);
if (cluster_offset == 0) {
ret = -EIO;
goto fail;
}
cluster_offset &= s->cluster_offset_mask;
iov = (struct iovec) {
.iov_base = out_buf,
.iov_len = out_len,
};
qemu_iovec_init_external(&hd_qiov, &iov, 1);
ret = bdrv_co_pwritev(bs->file, cluster_offset, out_len, &hd_qiov, 0);
if (ret < 0) {
goto fail;
}
success:
ret = 0;
fail:
qemu_vfree(buf);
g_free(out_buf);
return ret;
}
| 5,148 |
qemu | 71e605f80313a632cc6714cde7bd240042dbdd95 | 1 | static int fifo_put(SerialState *s, int fifo, uint8_t chr)
{
SerialFIFO *f = (fifo) ? &s->recv_fifo : &s->xmit_fifo;
f->data[f->head++] = chr;
if (f->head == UART_FIFO_LENGTH)
f->head = 0;
f->count++;
return 1;
}
| 5,149 |
qemu | c627e7526a902dd5bb1907dbbd5cf961679dfa68 | 1 | static void rc4030_reset(DeviceState *dev)
{
rc4030State *s = RC4030(dev);
int i;
s->config = 0x410; /* some boards seem to accept 0x104 too */
s->revision = 1;
s->invalid_address_register = 0;
memset(s->dma_regs, 0, sizeof(s->dma_regs));
rc4030_dma_tt_update(s, 0, 0);
s->remote_failed_address = s->memory_failed_address = 0;
s->cache_maint = 0;
s->cache_ptag = s->cache_ltag = 0;
s->cache_bmask = 0;
s->memory_refresh_rate = 0x18186;
s->nvram_protect = 7;
for (i = 0; i < 15; i++)
s->rem_speed[i] = 7;
s->imr_jazz = 0x10; /* XXX: required by firmware, but why? */
s->isr_jazz = 0;
s->itr = 0;
qemu_irq_lower(s->timer_irq);
qemu_irq_lower(s->jazz_bus_irq);
}
| 5,150 |
FFmpeg | f1e173049ecc9de03817385ba8962d14cba779db | 0 | static void encode_refpass(Jpeg2000T1Context *t1, int width, int height, int *nmsedec, int bpno)
{
int y0, x, y, mask = 1 << (bpno + NMSEDEC_FRACBITS);
for (y0 = 0; y0 < height; y0 += 4)
for (x = 0; x < width; x++)
for (y = y0; y < height && y < y0+4; y++)
if ((t1->flags[y+1][x+1] & (JPEG2000_T1_SIG | JPEG2000_T1_VIS)) == JPEG2000_T1_SIG){
int ctxno = ff_jpeg2000_getrefctxno(t1->flags[y+1][x+1]);
*nmsedec += getnmsedec_ref(t1->data[y][x], bpno + NMSEDEC_FRACBITS);
ff_mqc_encode(&t1->mqc, t1->mqc.cx_states + ctxno, t1->data[y][x] & mask ? 1:0);
t1->flags[y+1][x+1] |= JPEG2000_T1_REF;
}
}
| 5,151 |
qemu | 297a3646c2947ee64a6d42ca264039732c6218e0 | 1 | void visit_optional(Visitor *v, bool *present, const char *name,
Error **errp)
{
if (!error_is_set(errp) && v->optional) {
v->optional(v, present, name, errp);
}
}
| 5,152 |
FFmpeg | 417364ce1f979031ef6fee661fc15e1869bdb1b4 | 1 | static int ws_snd_decode_frame(AVCodecContext *avctx,
void *data, int *data_size,
AVPacket *avpkt)
{
const uint8_t *buf = avpkt->data;
int buf_size = avpkt->size;
// WSSNDContext *c = avctx->priv_data;
int in_size, out_size;
int sample = 128;
int i;
uint8_t *samples = data;
if (!buf_size)
return 0;
out_size = AV_RL16(&buf[0]);
in_size = AV_RL16(&buf[2]);
buf += 4;
if (out_size > *data_size) {
av_log(avctx, AV_LOG_ERROR, "Frame is too large to fit in buffer\n");
return -1;
}
if (in_size > buf_size) {
av_log(avctx, AV_LOG_ERROR, "Frame data is larger than input buffer\n");
return -1;
}
*data_size = out_size;
if (in_size == out_size) {
for (i = 0; i < out_size; i++)
*samples++ = *buf++;
return buf_size;
}
while (out_size > 0) {
int code;
uint8_t count;
code = (*buf) >> 6;
count = (*buf) & 0x3F;
buf++;
switch(code) {
case 0: /* ADPCM 2-bit */
for (count++; count > 0; count--) {
code = *buf++;
sample += ws_adpcm_2bit[code & 0x3];
sample = av_clip_uint8(sample);
*samples++ = sample;
sample += ws_adpcm_2bit[(code >> 2) & 0x3];
sample = av_clip_uint8(sample);
*samples++ = sample;
sample += ws_adpcm_2bit[(code >> 4) & 0x3];
sample = av_clip_uint8(sample);
*samples++ = sample;
sample += ws_adpcm_2bit[(code >> 6) & 0x3];
sample = av_clip_uint8(sample);
*samples++ = sample;
out_size -= 4;
}
break;
case 1: /* ADPCM 4-bit */
for (count++; count > 0; count--) {
code = *buf++;
sample += ws_adpcm_4bit[code & 0xF];
sample = av_clip_uint8(sample);
*samples++ = sample;
sample += ws_adpcm_4bit[code >> 4];
sample = av_clip_uint8(sample);
*samples++ = sample;
out_size -= 2;
}
break;
case 2: /* no compression */
if (count & 0x20) { /* big delta */
int8_t t;
t = count;
t <<= 3;
sample += t >> 3;
sample = av_clip_uint8(sample);
*samples++ = sample;
out_size--;
} else { /* copy */
for (count++; count > 0; count--) {
*samples++ = *buf++;
out_size--;
}
sample = buf[-1];
}
break;
default: /* run */
for(count++; count > 0; count--) {
*samples++ = sample;
out_size--;
}
}
}
return buf_size;
}
| 5,153 |
FFmpeg | f6774f905fb3cfdc319523ac640be30b14c1bc55 | 1 | static int mb_var_thread(AVCodecContext *c, void *arg){
MpegEncContext *s= *(void**)arg;
int mb_x, mb_y;
for(mb_y=s->start_mb_y; mb_y < s->end_mb_y; mb_y++) {
for(mb_x=0; mb_x < s->mb_width; mb_x++) {
int xx = mb_x * 16;
int yy = mb_y * 16;
uint8_t *pix = s->new_picture.f.data[0] + (yy * s->linesize) + xx;
int varc;
int sum = s->dsp.pix_sum(pix, s->linesize);
varc = (s->dsp.pix_norm1(pix, s->linesize) - (((unsigned)sum*sum)>>8) + 500 + 128)>>8;
s->current_picture.mb_var [s->mb_stride * mb_y + mb_x] = varc;
s->current_picture.mb_mean[s->mb_stride * mb_y + mb_x] = (sum+128)>>8;
s->me.mb_var_sum_temp += varc;
}
}
return 0;
}
| 5,154 |
qemu | 40ff6d7e8dceca227e7f8a3e8e0d58b2c66d19b4 | 1 | int inet_listen_opts(QemuOpts *opts, int port_offset)
{
struct addrinfo ai,*res,*e;
const char *addr;
char port[33];
char uaddr[INET6_ADDRSTRLEN+1];
char uport[33];
int slisten,rc,to,try_next;
memset(&ai,0, sizeof(ai));
ai.ai_flags = AI_PASSIVE | AI_ADDRCONFIG;
ai.ai_family = PF_UNSPEC;
ai.ai_socktype = SOCK_STREAM;
if (qemu_opt_get(opts, "port") == NULL) {
fprintf(stderr, "%s: host and/or port not specified\n", __FUNCTION__);
return -1;
}
pstrcpy(port, sizeof(port), qemu_opt_get(opts, "port"));
addr = qemu_opt_get(opts, "host");
to = qemu_opt_get_number(opts, "to", 0);
if (qemu_opt_get_bool(opts, "ipv4", 0))
ai.ai_family = PF_INET;
if (qemu_opt_get_bool(opts, "ipv6", 0))
ai.ai_family = PF_INET6;
/* lookup */
if (port_offset)
snprintf(port, sizeof(port), "%d", atoi(port) + port_offset);
rc = getaddrinfo(strlen(addr) ? addr : NULL, port, &ai, &res);
if (rc != 0) {
fprintf(stderr,"getaddrinfo(%s,%s): %s\n", addr, port,
gai_strerror(rc));
return -1;
}
if (sockets_debug)
inet_print_addrinfo(__FUNCTION__, res);
/* create socket + bind */
for (e = res; e != NULL; e = e->ai_next) {
getnameinfo((struct sockaddr*)e->ai_addr,e->ai_addrlen,
uaddr,INET6_ADDRSTRLEN,uport,32,
NI_NUMERICHOST | NI_NUMERICSERV);
slisten = socket(e->ai_family, e->ai_socktype, e->ai_protocol);
if (slisten < 0) {
fprintf(stderr,"%s: socket(%s): %s\n", __FUNCTION__,
inet_strfamily(e->ai_family), strerror(errno));
continue;
}
setsockopt(slisten,SOL_SOCKET,SO_REUSEADDR,(void*)&on,sizeof(on));
#ifdef IPV6_V6ONLY
if (e->ai_family == PF_INET6) {
/* listen on both ipv4 and ipv6 */
setsockopt(slisten,IPPROTO_IPV6,IPV6_V6ONLY,(void*)&off,
sizeof(off));
}
#endif
for (;;) {
if (bind(slisten, e->ai_addr, e->ai_addrlen) == 0) {
if (sockets_debug)
fprintf(stderr,"%s: bind(%s,%s,%d): OK\n", __FUNCTION__,
inet_strfamily(e->ai_family), uaddr, inet_getport(e));
goto listen;
}
try_next = to && (inet_getport(e) <= to + port_offset);
if (!try_next || sockets_debug)
fprintf(stderr,"%s: bind(%s,%s,%d): %s\n", __FUNCTION__,
inet_strfamily(e->ai_family), uaddr, inet_getport(e),
strerror(errno));
if (try_next) {
inet_setport(e, inet_getport(e) + 1);
continue;
}
break;
}
closesocket(slisten);
}
fprintf(stderr, "%s: FAILED\n", __FUNCTION__);
freeaddrinfo(res);
return -1;
listen:
if (listen(slisten,1) != 0) {
perror("listen");
closesocket(slisten);
freeaddrinfo(res);
return -1;
}
snprintf(uport, sizeof(uport), "%d", inet_getport(e) - port_offset);
qemu_opt_set(opts, "host", uaddr);
qemu_opt_set(opts, "port", uport);
qemu_opt_set(opts, "ipv6", (e->ai_family == PF_INET6) ? "on" : "off");
qemu_opt_set(opts, "ipv4", (e->ai_family != PF_INET6) ? "on" : "off");
freeaddrinfo(res);
return slisten;
}
| 5,155 |
FFmpeg | f5e717f3c735af5c941b458d42615c97028aa916 | 1 | static int socket_open_listen(struct sockaddr_in *my_addr)
{
int server_fd, tmp;
server_fd = socket(AF_INET,SOCK_STREAM,0);
if (server_fd < 0) {
perror ("socket");
return -1;
}
tmp = 1;
setsockopt(server_fd, SOL_SOCKET, SO_REUSEADDR, &tmp, sizeof(tmp));
if (bind (server_fd, (struct sockaddr *) my_addr, sizeof (*my_addr)) < 0) {
char bindmsg[32];
snprintf(bindmsg, sizeof(bindmsg), "bind(port %d)", ntohs(my_addr->sin_port));
perror (bindmsg);
closesocket(server_fd);
return -1;
}
if (listen (server_fd, 5) < 0) {
perror ("listen");
closesocket(server_fd);
return -1;
}
ff_socket_nonblock(server_fd, 1);
return server_fd;
} | 5,159 |
qemu | ab7a0f0b6dbe8836d490c736803abef6e3695e1f | 1 | static uint64_t pxa2xx_gpio_read(void *opaque, hwaddr offset,
unsigned size)
{
PXA2xxGPIOInfo *s = (PXA2xxGPIOInfo *) opaque;
uint32_t ret;
int bank;
if (offset >= 0x200)
return 0;
bank = pxa2xx_gpio_regs[offset].bank;
switch (pxa2xx_gpio_regs[offset].reg) {
case GPDR: /* GPIO Pin-Direction registers */
return s->dir[bank];
case GPSR: /* GPIO Pin-Output Set registers */
printf("%s: Read from a write-only register " REG_FMT "\n",
__FUNCTION__, offset);
return s->gpsr[bank]; /* Return last written value. */
case GPCR: /* GPIO Pin-Output Clear registers */
printf("%s: Read from a write-only register " REG_FMT "\n",
__FUNCTION__, offset);
return 31337; /* Specified as unpredictable in the docs. */
case GRER: /* GPIO Rising-Edge Detect Enable registers */
return s->rising[bank];
case GFER: /* GPIO Falling-Edge Detect Enable registers */
return s->falling[bank];
case GAFR_L: /* GPIO Alternate Function registers */
return s->gafr[bank * 2];
case GAFR_U: /* GPIO Alternate Function registers */
return s->gafr[bank * 2 + 1];
case GPLR: /* GPIO Pin-Level registers */
ret = (s->olevel[bank] & s->dir[bank]) |
(s->ilevel[bank] & ~s->dir[bank]);
qemu_irq_raise(s->read_notify);
return ret;
case GEDR: /* GPIO Edge Detect Status registers */
return s->status[bank];
default:
hw_error("%s: Bad offset " REG_FMT "\n", __FUNCTION__, offset);
}
return 0;
}
| 5,160 |
qemu | e0dadc1e9ef1f35208e5d2af9c7740c18a0b769f | 1 | static void aux_slave_class_init(ObjectClass *klass, void *data)
{
DeviceClass *k = DEVICE_CLASS(klass);
set_bit(DEVICE_CATEGORY_MISC, k->categories);
k->bus_type = TYPE_AUX_BUS;
}
| 5,161 |
qemu | 5cbdd273fbf5e977d14b1f06976489d8e4625a68 | 1 | static void vmdk_close(BlockDriverState *bs)
{
BDRVVmdkState *s = bs->opaque;
qemu_free(s->l1_table);
qemu_free(s->l2_cache);
bdrv_delete(s->hd);
// try to close parent image, if exist
vmdk_parent_close(s->hd);
}
| 5,162 |
qemu | 1e356fc14beaa3ece6c0e961bd479af58be3198b | 1 | static void *file_ram_alloc(RAMBlock *block,
ram_addr_t memory,
const char *path,
Error **errp)
{
bool unlink_on_error = false;
char *filename;
char *sanitized_name;
char *c;
void *area = MAP_FAILED;
int fd = -1;
int64_t file_size;
if (kvm_enabled() && !kvm_has_sync_mmu()) {
error_setg(errp,
"host lacks kvm mmu notifiers, -mem-path unsupported");
return NULL;
}
for (;;) {
fd = open(path, O_RDWR);
if (fd >= 0) {
/* @path names an existing file, use it */
break;
}
if (errno == ENOENT) {
/* @path names a file that doesn't exist, create it */
fd = open(path, O_RDWR | O_CREAT | O_EXCL, 0644);
if (fd >= 0) {
unlink_on_error = true;
break;
}
} else if (errno == EISDIR) {
/* @path names a directory, create a file there */
/* Make name safe to use with mkstemp by replacing '/' with '_'. */
sanitized_name = g_strdup(memory_region_name(block->mr));
for (c = sanitized_name; *c != '\0'; c++) {
if (*c == '/') {
*c = '_';
}
}
filename = g_strdup_printf("%s/qemu_back_mem.%s.XXXXXX", path,
sanitized_name);
g_free(sanitized_name);
fd = mkstemp(filename);
if (fd >= 0) {
unlink(filename);
g_free(filename);
break;
}
g_free(filename);
}
if (errno != EEXIST && errno != EINTR) {
error_setg_errno(errp, errno,
"can't open backing store %s for guest RAM",
path);
goto error;
}
/*
* Try again on EINTR and EEXIST. The latter happens when
* something else creates the file between our two open().
*/
}
block->page_size = qemu_fd_getpagesize(fd);
block->mr->align = block->page_size;
#if defined(__s390x__)
if (kvm_enabled()) {
block->mr->align = MAX(block->mr->align, QEMU_VMALLOC_ALIGN);
}
#endif
file_size = get_file_size(fd);
if (memory < block->page_size) {
error_setg(errp, "memory size 0x" RAM_ADDR_FMT " must be equal to "
"or larger than page size 0x%zx",
memory, block->page_size);
goto error;
}
if (file_size > 0 && file_size < memory) {
error_setg(errp, "backing store %s size 0x%" PRIx64
" does not match 'size' option 0x" RAM_ADDR_FMT,
path, file_size, memory);
goto error;
}
memory = ROUND_UP(memory, block->page_size);
/*
* ftruncate is not supported by hugetlbfs in older
* hosts, so don't bother bailing out on errors.
* If anything goes wrong with it under other filesystems,
* mmap will fail.
*
* Do not truncate the non-empty backend file to avoid corrupting
* the existing data in the file. Disabling shrinking is not
* enough. For example, the current vNVDIMM implementation stores
* the guest NVDIMM labels at the end of the backend file. If the
* backend file is later extended, QEMU will not be able to find
* those labels. Therefore, extending the non-empty backend file
* is disabled as well.
*/
if (!file_size && ftruncate(fd, memory)) {
perror("ftruncate");
}
area = qemu_ram_mmap(fd, memory, block->mr->align,
block->flags & RAM_SHARED);
if (area == MAP_FAILED) {
error_setg_errno(errp, errno,
"unable to map backing store for guest RAM");
goto error;
}
if (mem_prealloc) {
os_mem_prealloc(fd, area, memory, errp);
if (errp && *errp) {
goto error;
}
}
block->fd = fd;
return area;
error:
if (area != MAP_FAILED) {
qemu_ram_munmap(area, memory);
}
if (unlink_on_error) {
unlink(path);
}
if (fd != -1) {
close(fd);
}
return NULL;
}
| 5,163 |
FFmpeg | 81d4b3af81b52a79f11705ef02d3f48747047404 | 1 | static void qpeg_decode_inter(const uint8_t *src, uint8_t *dst, int size,
int stride, int width, int height,
int delta, const uint8_t *ctable, uint8_t *refdata)
{
int i, j;
int code;
int filled = 0;
int orig_height;
if(!refdata)
refdata= dst;
/* copy prev frame */
for(i = 0; i < height; i++)
memcpy(dst + (i * stride), refdata + (i * stride), width);
orig_height = height;
height--;
dst = dst + height * stride;
while((size > 0) && (height >= 0)) {
code = *src++;
size--;
if(delta) {
/* motion compensation */
while((code & 0xF0) == 0xF0) {
if(delta == 1) {
int me_idx;
int me_w, me_h, me_x, me_y;
uint8_t *me_plane;
int corr, val;
/* get block size by index */
me_idx = code & 0xF;
me_w = qpeg_table_w[me_idx];
me_h = qpeg_table_h[me_idx];
/* extract motion vector */
corr = *src++;
size--;
val = corr >> 4;
if(val > 7)
val -= 16;
me_x = val;
val = corr & 0xF;
if(val > 7)
val -= 16;
me_y = val;
/* check motion vector */
if ((me_x + filled < 0) || (me_x + me_w + filled > width) ||
(height - me_y - me_h < 0) || (height - me_y > orig_height) ||
(filled + me_w > width) || (height - me_h < 0))
av_log(NULL, AV_LOG_ERROR, "Bogus motion vector (%i,%i), block size %ix%i at %i,%i\n",
me_x, me_y, me_w, me_h, filled, height);
else {
/* do motion compensation */
me_plane = refdata + (filled + me_x) + (height - me_y) * stride;
for(j = 0; j < me_h; j++) {
for(i = 0; i < me_w; i++)
dst[filled + i - (j * stride)] = me_plane[i - (j * stride)];
}
}
}
code = *src++;
size--;
}
}
if(code == 0xE0) /* end-of-picture code */
break;
if(code > 0xE0) { /* run code: 0xE1..0xFF */
int p;
code &= 0x1F;
p = *src++;
size--;
for(i = 0; i <= code; i++) {
dst[filled++] = p;
if(filled >= width) {
filled = 0;
dst -= stride;
height--;
if(height < 0)
break;
}
}
} else if(code >= 0xC0) { /* copy code: 0xC0..0xDF */
code &= 0x1F;
for(i = 0; i <= code; i++) {
dst[filled++] = *src++;
if(filled >= width) {
filled = 0;
dst -= stride;
height--;
if(height < 0)
break;
}
}
size -= code + 1;
} else if(code >= 0x80) { /* skip code: 0x80..0xBF */
int skip;
code &= 0x3F;
/* codes 0x80 and 0x81 are actually escape codes,
skip value minus constant is in the next byte */
if(!code)
skip = (*src++) + 64;
else if(code == 1)
skip = (*src++) + 320;
else
skip = code;
filled += skip;
while( filled >= width) {
filled -= width;
dst -= stride;
height--;
if(height < 0)
break;
}
} else {
/* zero code treated as one-pixel skip */
if(code)
dst[filled++] = ctable[code & 0x7F];
else
filled++;
if(filled >= width) {
filled = 0;
dst -= stride;
height--;
}
}
}
}
| 5,164 |
qemu | 67e55caa6dcb91c80428cee6fe463f8dd8a755ab | 1 | static void x86_cpu_apic_create(X86CPU *cpu, Error **errp)
{
APICCommonState *apic;
const char *apic_type = "apic";
if (kvm_apic_in_kernel()) {
apic_type = "kvm-apic";
} else if (xen_enabled()) {
apic_type = "xen-apic";
}
cpu->apic_state = DEVICE(object_new(apic_type));
object_property_add_child(OBJECT(cpu), "lapic",
OBJECT(cpu->apic_state), &error_abort);
qdev_prop_set_uint8(cpu->apic_state, "id", cpu->apic_id);
/* TODO: convert to link<> */
apic = APIC_COMMON(cpu->apic_state);
apic->cpu = cpu;
apic->apicbase = APIC_DEFAULT_ADDRESS | MSR_IA32_APICBASE_ENABLE;
} | 5,165 |
qemu | 60fe637bf0e4d7989e21e50f52526444765c63b4 | 1 | bool migration_has_failed(MigrationState *s)
{
return (s->state == MIG_STATE_CANCELLED ||
s->state == MIG_STATE_ERROR);
}
| 5,166 |
FFmpeg | be373cb50d3c411366fec7eef2eb3681abe48f96 | 1 | static const uint8_t *read_huffman_tables(FourXContext *f,
const uint8_t * const buf)
{
int frequency[512] = { 0 };
uint8_t flag[512];
int up[512];
uint8_t len_tab[257];
int bits_tab[257];
int start, end;
const uint8_t *ptr = buf;
int j;
memset(up, -1, sizeof(up));
start = *ptr++;
end = *ptr++;
for (;;) {
int i;
for (i = start; i <= end; i++)
frequency[i] = *ptr++;
start = *ptr++;
if (start == 0)
break;
end = *ptr++;
}
frequency[256] = 1;
while ((ptr - buf) & 3)
ptr++; // 4byte align
for (j = 257; j < 512; j++) {
int min_freq[2] = { 256 * 256, 256 * 256 };
int smallest[2] = { 0, 0 };
int i;
for (i = 0; i < j; i++) {
if (frequency[i] == 0)
continue;
if (frequency[i] < min_freq[1]) {
if (frequency[i] < min_freq[0]) {
min_freq[1] = min_freq[0];
smallest[1] = smallest[0];
min_freq[0] = frequency[i];
smallest[0] = i;
} else {
min_freq[1] = frequency[i];
smallest[1] = i;
}
}
}
if (min_freq[1] == 256 * 256)
break;
frequency[j] = min_freq[0] + min_freq[1];
flag[smallest[0]] = 0;
flag[smallest[1]] = 1;
up[smallest[0]] =
up[smallest[1]] = j;
frequency[smallest[0]] = frequency[smallest[1]] = 0;
}
for (j = 0; j < 257; j++) {
int node, len = 0, bits = 0;
for (node = j; up[node] != -1; node = up[node]) {
bits += flag[node] << len;
len++;
if (len > 31)
// can this happen at all ?
av_log(f->avctx, AV_LOG_ERROR,
"vlc length overflow\n");
}
bits_tab[j] = bits;
len_tab[j] = len;
}
if (init_vlc(&f->pre_vlc, ACDC_VLC_BITS, 257, len_tab, 1, 1,
bits_tab, 4, 4, 0))
return NULL;
return ptr;
}
| 5,167 |
qemu | 854e67fea6a6f181163a5467fc9ba04de8d181bb | 1 | void hmp_info_tlb(Monitor *mon, const QDict *qdict)
{
CPUArchState *env1 = mon_get_cpu_env();
    dump_mmu((FILE*)mon, (fprintf_function)monitor_printf, env1);
}
| 5,168 |
FFmpeg | a9456c7c5ca883b5a3947e59a9fba5587e18e119 | 1 | int ff_mjpeg_decode_frame(AVCodecContext *avctx, void *data, int *data_size,
AVPacket *avpkt)
{
const uint8_t *buf = avpkt->data;
int buf_size = avpkt->size;
MJpegDecodeContext *s = avctx->priv_data;
const uint8_t *buf_end, *buf_ptr;
const uint8_t *unescaped_buf_ptr;
int unescaped_buf_size;
int start_code;
int i, index;
int ret = 0;
AVFrame *picture = data;
s->got_picture = 0; // picture from previous image can not be reused
buf_ptr = buf;
buf_end = buf + buf_size;
while (buf_ptr < buf_end) {
/* find start next marker */
start_code = ff_mjpeg_find_marker(s, &buf_ptr, buf_end,
&unescaped_buf_ptr,
&unescaped_buf_size);
/* EOF */
if (start_code < 0) {
goto the_end;
} else if (unescaped_buf_size > (1U<<29)) {
av_log(avctx, AV_LOG_ERROR, "MJPEG packet 0x%x too big (0x%x/0x%x), corrupt data?\n",
start_code, unescaped_buf_size, buf_size);
return AVERROR_INVALIDDATA;
} else {
av_log(avctx, AV_LOG_DEBUG, "marker=%x avail_size_in_buf=%td\n",
start_code, buf_end - buf_ptr);
init_get_bits(&s->gb, unescaped_buf_ptr, unescaped_buf_size * 8);
s->start_code = start_code;
if (s->avctx->debug & FF_DEBUG_STARTCODE)
av_log(avctx, AV_LOG_DEBUG, "startcode: %X\n", start_code);
/* process markers */
if (start_code >= 0xd0 && start_code <= 0xd7)
av_log(avctx, AV_LOG_DEBUG,
"restart marker: %d\n", start_code & 0x0f);
/* APP fields */
else if (start_code >= APP0 && start_code <= APP15)
mjpeg_decode_app(s);
/* Comment */
else if (start_code == COM)
mjpeg_decode_com(s);
switch (start_code) {
case SOI:
s->restart_interval = 0;
s->restart_count = 0;
/* nothing to do on SOI */
break;
case DQT:
ff_mjpeg_decode_dqt(s);
break;
case DHT:
if ((ret = ff_mjpeg_decode_dht(s)) < 0) {
av_log(avctx, AV_LOG_ERROR, "huffman table decode error\n");
return ret;
}
break;
case SOF0:
case SOF1:
s->lossless = 0;
s->ls = 0;
s->progressive = 0;
if ((ret = ff_mjpeg_decode_sof(s)) < 0)
return ret;
break;
case SOF2:
s->lossless = 0;
s->ls = 0;
s->progressive = 1;
if ((ret = ff_mjpeg_decode_sof(s)) < 0)
return ret;
break;
case SOF3:
s->lossless = 1;
s->ls = 0;
s->progressive = 0;
if ((ret = ff_mjpeg_decode_sof(s)) < 0)
return ret;
break;
case SOF48:
s->lossless = 1;
s->ls = 1;
s->progressive = 0;
if ((ret = ff_mjpeg_decode_sof(s)) < 0)
return ret;
break;
case LSE:
if (!CONFIG_JPEGLS_DECODER ||
(ret = ff_jpegls_decode_lse(s)) < 0)
return ret;
break;
case EOI:
eoi_parser:
s->cur_scan = 0;
if (!s->got_picture) {
av_log(avctx, AV_LOG_WARNING,
"Found EOI before any SOF, ignoring\n");
break;
}
if (s->interlaced) {
s->bottom_field ^= 1;
/* if not bottom field, do not output image yet */
if (s->bottom_field == !s->interlace_polarity)
break;
}
*picture = *s->picture_ptr;
*data_size = sizeof(AVFrame);
if (!s->lossless) {
picture->quality = FFMAX3(s->qscale[0],
s->qscale[1],
s->qscale[2]);
picture->qstride = 0;
picture->qscale_table = s->qscale_table;
memset(picture->qscale_table, picture->quality,
(s->width + 15) / 16);
if (avctx->debug & FF_DEBUG_QP)
av_log(avctx, AV_LOG_DEBUG,
"QP: %d\n", picture->quality);
picture->quality *= FF_QP2LAMBDA;
}
goto the_end;
case SOS:
if ((ret = ff_mjpeg_decode_sos(s, NULL, NULL)) < 0 &&
(avctx->err_recognition & AV_EF_EXPLODE))
return ret;
break;
case DRI:
mjpeg_decode_dri(s);
break;
case SOF5:
case SOF6:
case SOF7:
case SOF9:
case SOF10:
case SOF11:
case SOF13:
case SOF14:
case SOF15:
case JPG:
av_log(avctx, AV_LOG_ERROR,
"mjpeg: unsupported coding type (%x)\n", start_code);
break;
}
/* eof process start code */
buf_ptr += (get_bits_count(&s->gb) + 7) / 8;
av_log(avctx, AV_LOG_DEBUG,
"marker parser used %d bytes (%d bits)\n",
(get_bits_count(&s->gb) + 7) / 8, get_bits_count(&s->gb));
}
}
if (s->got_picture) {
av_log(avctx, AV_LOG_WARNING, "EOI missing, emulating\n");
goto eoi_parser;
}
av_log(avctx, AV_LOG_FATAL, "No JPEG data found in image\n");
return AVERROR_INVALIDDATA;
the_end:
if (s->upscale_h) {
uint8_t *line = s->picture_ptr->data[s->upscale_h];
av_assert0(avctx->pix_fmt == AV_PIX_FMT_YUVJ444P ||
avctx->pix_fmt == AV_PIX_FMT_YUV444P ||
avctx->pix_fmt == AV_PIX_FMT_YUVJ440P ||
avctx->pix_fmt == AV_PIX_FMT_YUV440P);
for (i = 0; i < s->chroma_height; i++) {
for (index = s->width - 1; index; index--)
line[index] = (line[index / 2] + line[(index + 1) / 2]) >> 1;
line += s->linesize[s->upscale_h];
}
}
if (s->upscale_v) {
uint8_t *dst = &((uint8_t *)s->picture_ptr->data[s->upscale_v])[(s->height - 1) * s->linesize[s->upscale_v]];
av_assert0(avctx->pix_fmt == AV_PIX_FMT_YUVJ444P ||
avctx->pix_fmt == AV_PIX_FMT_YUV444P ||
avctx->pix_fmt == AV_PIX_FMT_YUVJ422P ||
avctx->pix_fmt == AV_PIX_FMT_YUV422P);
for (i = s->height - 1; i; i--) {
uint8_t *src1 = &((uint8_t *)s->picture_ptr->data[s->upscale_v])[i / 2 * s->linesize[s->upscale_v]];
uint8_t *src2 = &((uint8_t *)s->picture_ptr->data[s->upscale_v])[(i + 1) / 2 * s->linesize[s->upscale_v]];
if (src1 == src2) {
memcpy(dst, src1, s->width);
} else {
for (index = 0; index < s->width; index++)
dst[index] = (src1[index] + src2[index]) >> 1;
}
dst -= s->linesize[s->upscale_v];
}
}
if (s->flipped && (s->avctx->flags & CODEC_FLAG_EMU_EDGE)) {
int hshift, vshift, j;
avcodec_get_chroma_sub_sample(s->avctx->pix_fmt, &hshift, &vshift);
for (index=0; index<4; index++) {
uint8_t *dst = s->picture_ptr->data[index];
int w = s->width;
int h = s->height;
if(index && index<3){
w = -((-w) >> hshift);
h = -((-h) >> vshift);
}
if(dst){
uint8_t *dst2 = dst + s->linesize[index]*(h-1);
for (i=0; i<h/2; i++) {
for (j=0; j<w; j++)
FFSWAP(int, dst[j], dst2[j]);
dst += s->linesize[index];
dst2 -= s->linesize[index];
}
}
}
}
av_log(avctx, AV_LOG_DEBUG, "decode frame unused %td bytes\n",
buf_end - buf_ptr);
// return buf_end - buf_ptr;
return buf_ptr - buf;
}
| 5,169 |
qemu | f693fe6ef402bdcf89b3979bf004c4c5bf646724 | 1 | GuestMemoryBlockList *qmp_guest_get_memory_blocks(Error **errp)
{
GuestMemoryBlockList *head, **link;
Error *local_err = NULL;
struct dirent *de;
DIR *dp;
head = NULL;
link = &head;
dp = opendir("/sys/devices/system/memory/");
if (!dp) {
error_setg_errno(errp, errno, "Can't open directory"
"\"/sys/devices/system/memory/\"\n");
return NULL;
}
/* Note: the phys_index of memory block may be discontinuous,
* this is because a memblk is the unit of the Sparse Memory design, which
* allows discontinuous memory ranges (ex. NUMA), so here we should
* traverse the memory block directory.
*/
while ((de = readdir(dp)) != NULL) {
GuestMemoryBlock *mem_blk;
GuestMemoryBlockList *entry;
if ((strncmp(de->d_name, "memory", 6) != 0) ||
!(de->d_type & DT_DIR)) {
continue;
}
mem_blk = g_malloc0(sizeof *mem_blk);
/* The d_name is "memoryXXX", phys_index is block id, same as XXX */
mem_blk->phys_index = strtoul(&de->d_name[6], NULL, 10);
mem_blk->has_can_offline = true; /* lolspeak ftw */
transfer_memory_block(mem_blk, true, NULL, &local_err);
entry = g_malloc0(sizeof *entry);
entry->value = mem_blk;
*link = entry;
link = &entry->next;
}
closedir(dp);
if (local_err == NULL) {
/* there's no guest with zero memory blocks */
if (head == NULL) {
error_setg(errp, "guest reported zero memory blocks!");
}
return head;
}
qapi_free_GuestMemoryBlockList(head);
error_propagate(errp, local_err);
return NULL;
}
| 5,170 |
qemu | 3a661f1eabf7e8db66e28489884d9b54aacb94ea | 1 | int qcrypto_cipher_decrypt(QCryptoCipher *cipher,
const void *in,
void *out,
size_t len,
Error **errp)
{
QCryptoCipherBuiltin *ctxt = cipher->opaque;
return ctxt->decrypt(cipher, in, out, len, errp); | 5,173 |
qemu | 5e30a07d6d70d3073ff61e6db79d61c2b688502f | 1 | static int get_blocksize(BlockDriverState *bdrv)
{
uint8_t cmd[10];
uint8_t buf[8];
uint8_t sensebuf[8];
sg_io_hdr_t io_header;
int ret;
memset(cmd, 0, sizeof(cmd));
memset(buf, 0, sizeof(buf));
cmd[0] = READ_CAPACITY;
memset(&io_header, 0, sizeof(io_header));
io_header.interface_id = 'S';
io_header.dxfer_direction = SG_DXFER_FROM_DEV;
io_header.dxfer_len = sizeof(buf);
io_header.dxferp = buf;
io_header.cmdp = cmd;
io_header.cmd_len = sizeof(cmd);
io_header.mx_sb_len = sizeof(sensebuf);
io_header.sbp = sensebuf;
io_header.timeout = 6000; /* XXX */
ret = bdrv_ioctl(bdrv, SG_IO, &io_header);
if (ret < 0)
return -1;
return (buf[4] << 24) | (buf[5] << 16) | (buf[6] << 8) | buf[7];
}
| 5,175 |
FFmpeg | aed84ee4d1b0c9e315a84b1ee0918fa49ee9cc09 | 1 | static int svq1_decode_block_non_intra(GetBitContext *bitbuf, uint8_t *pixels,
ptrdiff_t pitch)
{
uint32_t bit_cache;
uint8_t *list[63];
uint32_t *dst;
const uint32_t *codebook;
int entries[6];
int i, j, m, n;
int stages;
unsigned mean;
int x, y, width, height, level;
uint32_t n1, n2, n3, n4;
/* initialize list for breadth first processing of vectors */
list[0] = pixels;
/* recursively process vector */
for (i = 0, m = 1, n = 1, level = 5; i < n; i++) {
SVQ1_PROCESS_VECTOR();
/* destination address and vector size */
dst = (uint32_t *)list[i];
width = 1 << ((4 + level) / 2);
height = 1 << ((3 + level) / 2);
/* get number of stages (-1 skips vector, 0 for mean only) */
stages = get_vlc2(bitbuf, svq1_inter_multistage[level].table, 3, 2) - 1;
if (stages == -1)
continue; /* skip vector */
if ((stages > 0 && level >= 4)) {
ff_dlog(NULL,
"Error (svq1_decode_block_non_intra): invalid vector: stages=%i level=%i\n",
stages, level);
return AVERROR_INVALIDDATA; /* invalid vector */
}
av_assert0(stages >= 0);
mean = get_vlc2(bitbuf, svq1_inter_mean.table, 9, 3) - 256;
SVQ1_CALC_CODEBOOK_ENTRIES(ff_svq1_inter_codebooks);
for (y = 0; y < height; y++) {
for (x = 0; x < width / 4; x++, codebook++) {
n3 = dst[x];
/* add mean value to vector */
n1 = n4 + ((n3 & 0xFF00FF00) >> 8);
n2 = n4 + (n3 & 0x00FF00FF);
SVQ1_ADD_CODEBOOK()
/* store result */
dst[x] = n1 << 8 | n2;
}
dst += pitch / 4;
}
}
return 0;
}
| 5,179 |
qemu | e7d81004e486b0e80a674d164d8aec0e83fa812f | 1 | void do_interrupt (CPUState *env)
{
#if !defined(CONFIG_USER_ONLY)
target_ulong offset;
int cause = -1;
const char *name;
if (qemu_log_enabled() && env->exception_index != EXCP_EXT_INTERRUPT) {
if (env->exception_index < 0 || env->exception_index > EXCP_LAST)
name = "unknown";
else
name = excp_names[env->exception_index];
qemu_log("%s enter: PC " TARGET_FMT_lx " EPC " TARGET_FMT_lx " %s exception\n",
__func__, env->active_tc.PC, env->CP0_EPC, name);
}
if (env->exception_index == EXCP_EXT_INTERRUPT &&
(env->hflags & MIPS_HFLAG_DM))
env->exception_index = EXCP_DINT;
offset = 0x180;
switch (env->exception_index) {
case EXCP_DSS:
env->CP0_Debug |= 1 << CP0DB_DSS;
/* Debug single step cannot be raised inside a delay slot and
resume will always occur on the next instruction
(but we assume the pc has always been updated during
code translation). */
env->CP0_DEPC = env->active_tc.PC | !!(env->hflags & MIPS_HFLAG_M16);
goto enter_debug_mode;
case EXCP_DINT:
env->CP0_Debug |= 1 << CP0DB_DINT;
goto set_DEPC;
case EXCP_DIB:
env->CP0_Debug |= 1 << CP0DB_DIB;
goto set_DEPC;
case EXCP_DBp:
env->CP0_Debug |= 1 << CP0DB_DBp;
goto set_DEPC;
case EXCP_DDBS:
env->CP0_Debug |= 1 << CP0DB_DDBS;
goto set_DEPC;
case EXCP_DDBL:
env->CP0_Debug |= 1 << CP0DB_DDBL;
set_DEPC:
env->CP0_DEPC = exception_resume_pc(env);
env->hflags &= ~MIPS_HFLAG_BMASK;
enter_debug_mode:
env->hflags |= MIPS_HFLAG_DM | MIPS_HFLAG_64 | MIPS_HFLAG_CP0;
env->hflags &= ~(MIPS_HFLAG_KSU);
/* EJTAG probe trap enable is not implemented... */
if (!(env->CP0_Status & (1 << CP0St_EXL)))
env->CP0_Cause &= ~(1 << CP0Ca_BD);
env->active_tc.PC = (int32_t)0xBFC00480;
set_hflags_for_handler(env);
break;
case EXCP_RESET:
cpu_reset(env);
break;
case EXCP_SRESET:
env->CP0_Status |= (1 << CP0St_SR);
memset(env->CP0_WatchLo, 0, sizeof(*env->CP0_WatchLo));
goto set_error_EPC;
case EXCP_NMI:
env->CP0_Status |= (1 << CP0St_NMI);
set_error_EPC:
env->CP0_ErrorEPC = exception_resume_pc(env);
env->hflags &= ~MIPS_HFLAG_BMASK;
env->CP0_Status |= (1 << CP0St_ERL) | (1 << CP0St_BEV);
env->hflags |= MIPS_HFLAG_64 | MIPS_HFLAG_CP0;
env->hflags &= ~(MIPS_HFLAG_KSU);
if (!(env->CP0_Status & (1 << CP0St_EXL)))
env->CP0_Cause &= ~(1 << CP0Ca_BD);
env->active_tc.PC = (int32_t)0xBFC00000;
set_hflags_for_handler(env);
break;
case EXCP_EXT_INTERRUPT:
cause = 0;
if (env->CP0_Cause & (1 << CP0Ca_IV))
offset = 0x200;
if (env->CP0_Config3 & ((1 << CP0C3_VInt) | (1 << CP0C3_VEIC))) {
/* Vectored Interrupts. */
unsigned int spacing;
unsigned int vector;
unsigned int pending = (env->CP0_Cause & CP0Ca_IP_mask) >> 8;
pending &= env->CP0_Status >> 8;
/* Compute the Vector Spacing. */
spacing = (env->CP0_IntCtl >> CP0IntCtl_VS) & ((1 << 6) - 1);
spacing <<= 5;
if (env->CP0_Config3 & (1 << CP0C3_VInt)) {
/* For VInt mode, the MIPS computes the vector internally. */
for (vector = 7; vector > 0; vector--) {
if (pending & (1 << vector)) {
/* Found it. */
break;
}
}
} else {
/* For VEIC mode, the external interrupt controller feeds the
vector throught the CP0Cause IP lines. */
vector = pending;
}
offset = 0x200 + vector * spacing;
}
goto set_EPC;
case EXCP_LTLBL:
cause = 1;
goto set_EPC;
case EXCP_TLBL:
cause = 2;
if (env->error_code == 1 && !(env->CP0_Status & (1 << CP0St_EXL))) {
#if defined(TARGET_MIPS64)
int R = env->CP0_BadVAddr >> 62;
int UX = (env->CP0_Status & (1 << CP0St_UX)) != 0;
int SX = (env->CP0_Status & (1 << CP0St_SX)) != 0;
int KX = (env->CP0_Status & (1 << CP0St_KX)) != 0;
if (((R == 0 && UX) || (R == 1 && SX) || (R == 3 && KX)) &&
(!(env->insn_flags & (INSN_LOONGSON2E | INSN_LOONGSON2F))))
offset = 0x080;
else
#endif
offset = 0x000;
}
goto set_EPC;
case EXCP_TLBS:
cause = 3;
if (env->error_code == 1 && !(env->CP0_Status & (1 << CP0St_EXL))) {
#if defined(TARGET_MIPS64)
int R = env->CP0_BadVAddr >> 62;
int UX = (env->CP0_Status & (1 << CP0St_UX)) != 0;
int SX = (env->CP0_Status & (1 << CP0St_SX)) != 0;
int KX = (env->CP0_Status & (1 << CP0St_KX)) != 0;
if (((R == 0 && UX) || (R == 1 && SX) || (R == 3 && KX)) &&
(!(env->insn_flags & (INSN_LOONGSON2E | INSN_LOONGSON2F))))
offset = 0x080;
else
#endif
offset = 0x000;
}
goto set_EPC;
case EXCP_AdEL:
cause = 4;
goto set_EPC;
case EXCP_AdES:
cause = 5;
goto set_EPC;
case EXCP_IBE:
cause = 6;
goto set_EPC;
case EXCP_DBE:
cause = 7;
goto set_EPC;
case EXCP_SYSCALL:
cause = 8;
goto set_EPC;
case EXCP_BREAK:
cause = 9;
goto set_EPC;
case EXCP_RI:
cause = 10;
goto set_EPC;
case EXCP_CpU:
cause = 11;
env->CP0_Cause = (env->CP0_Cause & ~(0x3 << CP0Ca_CE)) |
(env->error_code << CP0Ca_CE);
goto set_EPC;
case EXCP_OVERFLOW:
cause = 12;
goto set_EPC;
case EXCP_TRAP:
cause = 13;
goto set_EPC;
case EXCP_FPE:
cause = 15;
goto set_EPC;
case EXCP_C2E:
cause = 18;
goto set_EPC;
case EXCP_MDMX:
cause = 22;
goto set_EPC;
case EXCP_DWATCH:
cause = 23;
/* XXX: TODO: manage defered watch exceptions */
goto set_EPC;
case EXCP_MCHECK:
cause = 24;
goto set_EPC;
case EXCP_THREAD:
cause = 25;
goto set_EPC;
case EXCP_CACHE:
cause = 30;
if (env->CP0_Status & (1 << CP0St_BEV)) {
offset = 0x100;
} else {
offset = 0x20000100;
}
set_EPC:
if (!(env->CP0_Status & (1 << CP0St_EXL))) {
env->CP0_EPC = exception_resume_pc(env);
if (env->hflags & MIPS_HFLAG_BMASK) {
env->CP0_Cause |= (1 << CP0Ca_BD);
} else {
env->CP0_Cause &= ~(1 << CP0Ca_BD);
}
env->CP0_Status |= (1 << CP0St_EXL);
env->hflags |= MIPS_HFLAG_64 | MIPS_HFLAG_CP0;
env->hflags &= ~(MIPS_HFLAG_KSU);
}
env->hflags &= ~MIPS_HFLAG_BMASK;
if (env->CP0_Status & (1 << CP0St_BEV)) {
env->active_tc.PC = (int32_t)0xBFC00200;
} else {
env->active_tc.PC = (int32_t)(env->CP0_EBase & ~0x3ff);
}
env->active_tc.PC += offset;
set_hflags_for_handler(env);
env->CP0_Cause = (env->CP0_Cause & ~(0x1f << CP0Ca_EC)) | (cause << CP0Ca_EC);
break;
default:
qemu_log("Invalid MIPS exception %d. Exiting\n", env->exception_index);
printf("Invalid MIPS exception %d. Exiting\n", env->exception_index);
exit(1);
}
if (qemu_log_enabled() && env->exception_index != EXCP_EXT_INTERRUPT) {
qemu_log("%s: PC " TARGET_FMT_lx " EPC " TARGET_FMT_lx " cause %d\n"
" S %08x C %08x A " TARGET_FMT_lx " D " TARGET_FMT_lx "\n",
__func__, env->active_tc.PC, env->CP0_EPC, cause,
env->CP0_Status, env->CP0_Cause, env->CP0_BadVAddr,
env->CP0_DEPC);
}
#endif
env->exception_index = EXCP_NONE;
}
| 5,181 |
FFmpeg | 276839b8de7ff836a529bbd6221f615a343b23e1 | 0 | static int dxva2_h264_decode_slice(AVCodecContext *avctx,
const uint8_t *buffer,
uint32_t size)
{
const H264Context *h = avctx->priv_data;
struct dxva_context *ctx = avctx->hwaccel_context;
const Picture *current_picture = h->cur_pic_ptr;
struct dxva2_picture_context *ctx_pic = current_picture->hwaccel_picture_private;
unsigned position;
if (ctx_pic->slice_count >= MAX_SLICES)
return -1;
if (!ctx_pic->bitstream)
ctx_pic->bitstream = buffer;
ctx_pic->bitstream_size += size;
position = buffer - ctx_pic->bitstream;
if (is_slice_short(ctx))
fill_slice_short(&ctx_pic->slice_short[ctx_pic->slice_count],
position, size);
else
fill_slice_long(avctx, &ctx_pic->slice_long[ctx_pic->slice_count],
position, size);
ctx_pic->slice_count++;
if (h->slice_type != AV_PICTURE_TYPE_I && h->slice_type != AV_PICTURE_TYPE_SI)
ctx_pic->pp.wBitFields &= ~(1 << 15); /* Set IntraPicFlag to 0 */
return 0;
}
| 5,182 |
FFmpeg | e32eddaa51ad6e84ce9592b9634a788fcda9bad3 | 0 | static int wavpack_encode_block(WavPackEncodeContext *s,
int32_t *samples_l, int32_t *samples_r,
uint8_t *out, int out_size)
{
int block_size, start, end, data_size, tcount, temp, m = 0;
int i, j, ret = 0, got_extra = 0, nb_samples = s->block_samples;
uint32_t crc = 0xffffffffu;
struct Decorr *dpp;
PutByteContext pb;
if (!(s->flags & WV_MONO) && s->optimize_mono) {
int32_t lor = 0, diff = 0;
for (i = 0; i < nb_samples; i++) {
lor |= samples_l[i] | samples_r[i];
diff |= samples_l[i] - samples_r[i];
if (lor && diff)
break;
}
if (i == nb_samples && lor && !diff) {
s->flags &= ~(WV_JOINT_STEREO | WV_CROSS_DECORR);
s->flags |= WV_FALSE_STEREO;
if (!s->false_stereo) {
s->false_stereo = 1;
s->num_terms = 0;
CLEAR(s->w);
}
} else if (s->false_stereo) {
s->false_stereo = 0;
s->num_terms = 0;
CLEAR(s->w);
}
}
if (s->flags & SHIFT_MASK) {
int shift = (s->flags & SHIFT_MASK) >> SHIFT_LSB;
int mag = (s->flags & MAG_MASK) >> MAG_LSB;
if (s->flags & WV_MONO_DATA)
shift_mono(samples_l, nb_samples, shift);
else
shift_stereo(samples_l, samples_r, nb_samples, shift);
if ((mag -= shift) < 0)
s->flags &= ~MAG_MASK;
else
s->flags -= (1 << MAG_LSB) * shift;
}
if ((s->flags & WV_FLOAT_DATA) || (s->flags & MAG_MASK) >> MAG_LSB >= 24) {
av_fast_padded_malloc(&s->orig_l, &s->orig_l_size, sizeof(int32_t) * nb_samples);
memcpy(s->orig_l, samples_l, sizeof(int32_t) * nb_samples);
if (!(s->flags & WV_MONO_DATA)) {
av_fast_padded_malloc(&s->orig_r, &s->orig_r_size, sizeof(int32_t) * nb_samples);
memcpy(s->orig_r, samples_r, sizeof(int32_t) * nb_samples);
}
if (s->flags & WV_FLOAT_DATA)
got_extra = scan_float(s, samples_l, samples_r, nb_samples);
else
got_extra = scan_int32(s, samples_l, samples_r, nb_samples);
s->num_terms = 0;
} else {
scan_int23(s, samples_l, samples_r, nb_samples);
if (s->shift != s->int32_zeros + s->int32_ones + s->int32_dups) {
s->shift = s->int32_zeros + s->int32_ones + s->int32_dups;
s->num_terms = 0;
}
}
if (!s->num_passes && !s->num_terms) {
s->num_passes = 1;
if (s->flags & WV_MONO_DATA)
ret = wv_mono(s, samples_l, 1, 0);
else
ret = wv_stereo(s, samples_l, samples_r, 1, 0);
s->num_passes = 0;
}
if (s->flags & WV_MONO_DATA) {
for (i = 0; i < nb_samples; i++)
crc += (crc << 1) + samples_l[i];
if (s->num_passes)
ret = wv_mono(s, samples_l, !s->num_terms, 1);
} else {
for (i = 0; i < nb_samples; i++)
crc += (crc << 3) + (samples_l[i] << 1) + samples_l[i] + samples_r[i];
if (s->num_passes)
ret = wv_stereo(s, samples_l, samples_r, !s->num_terms, 1);
}
if (ret < 0)
return ret;
if (!s->ch_offset)
s->flags |= WV_INITIAL_BLOCK;
s->ch_offset += 1 + !(s->flags & WV_MONO);
if (s->ch_offset == s->avctx->channels)
s->flags |= WV_FINAL_BLOCK;
bytestream2_init_writer(&pb, out, out_size);
bytestream2_put_le32(&pb, MKTAG('w', 'v', 'p', 'k'));
bytestream2_put_le32(&pb, 0);
bytestream2_put_le16(&pb, 0x410);
bytestream2_put_le16(&pb, 0);
bytestream2_put_le32(&pb, 0);
bytestream2_put_le32(&pb, s->sample_index);
bytestream2_put_le32(&pb, nb_samples);
bytestream2_put_le32(&pb, s->flags);
bytestream2_put_le32(&pb, crc);
if (s->flags & WV_INITIAL_BLOCK &&
s->avctx->channel_layout != AV_CH_LAYOUT_MONO &&
s->avctx->channel_layout != AV_CH_LAYOUT_STEREO) {
put_metadata_block(&pb, WP_ID_CHANINFO, 5);
bytestream2_put_byte(&pb, s->avctx->channels);
bytestream2_put_le32(&pb, s->avctx->channel_layout);
bytestream2_put_byte(&pb, 0);
}
if ((s->flags & SRATE_MASK) == SRATE_MASK) {
put_metadata_block(&pb, WP_ID_SAMPLE_RATE, 3);
bytestream2_put_le24(&pb, s->avctx->sample_rate);
bytestream2_put_byte(&pb, 0);
}
put_metadata_block(&pb, WP_ID_DECTERMS, s->num_terms);
for (i = 0; i < s->num_terms; i++) {
struct Decorr *dpp = &s->decorr_passes[i];
bytestream2_put_byte(&pb, ((dpp->value + 5) & 0x1f) | ((dpp->delta << 5) & 0xe0));
}
if (s->num_terms & 1)
bytestream2_put_byte(&pb, 0);
#define WRITE_DECWEIGHT(type) do { \
temp = store_weight(type); \
bytestream2_put_byte(&pb, temp); \
type = restore_weight(temp); \
} while (0)
bytestream2_put_byte(&pb, WP_ID_DECWEIGHTS);
bytestream2_put_byte(&pb, 0);
start = bytestream2_tell_p(&pb);
for (i = s->num_terms - 1; i >= 0; --i) {
struct Decorr *dpp = &s->decorr_passes[i];
if (store_weight(dpp->weightA) ||
(!(s->flags & WV_MONO_DATA) && store_weight(dpp->weightB)))
break;
}
tcount = i + 1;
for (i = 0; i < s->num_terms; i++) {
struct Decorr *dpp = &s->decorr_passes[i];
if (i < tcount) {
WRITE_DECWEIGHT(dpp->weightA);
if (!(s->flags & WV_MONO_DATA))
WRITE_DECWEIGHT(dpp->weightB);
} else {
dpp->weightA = dpp->weightB = 0;
}
}
end = bytestream2_tell_p(&pb);
out[start - 2] = WP_ID_DECWEIGHTS | (((end - start) & 1) ? WP_IDF_ODD: 0);
out[start - 1] = (end - start + 1) >> 1;
if ((end - start) & 1)
bytestream2_put_byte(&pb, 0);
#define WRITE_DECSAMPLE(type) do { \
temp = log2s(type); \
type = wp_exp2(temp); \
bytestream2_put_le16(&pb, temp); \
} while (0)
bytestream2_put_byte(&pb, WP_ID_DECSAMPLES);
bytestream2_put_byte(&pb, 0);
start = bytestream2_tell_p(&pb);
for (i = 0; i < s->num_terms; i++) {
struct Decorr *dpp = &s->decorr_passes[i];
if (i == 0) {
if (dpp->value > MAX_TERM) {
WRITE_DECSAMPLE(dpp->samplesA[0]);
WRITE_DECSAMPLE(dpp->samplesA[1]);
if (!(s->flags & WV_MONO_DATA)) {
WRITE_DECSAMPLE(dpp->samplesB[0]);
WRITE_DECSAMPLE(dpp->samplesB[1]);
}
} else if (dpp->value < 0) {
WRITE_DECSAMPLE(dpp->samplesA[0]);
WRITE_DECSAMPLE(dpp->samplesB[0]);
} else {
for (j = 0; j < dpp->value; j++) {
WRITE_DECSAMPLE(dpp->samplesA[j]);
if (!(s->flags & WV_MONO_DATA))
WRITE_DECSAMPLE(dpp->samplesB[j]);
}
}
} else {
CLEAR(dpp->samplesA);
CLEAR(dpp->samplesB);
}
}
end = bytestream2_tell_p(&pb);
out[start - 1] = (end - start) >> 1;
#define WRITE_CHAN_ENTROPY(chan) do { \
for (i = 0; i < 3; i++) { \
temp = wp_log2(s->w.c[chan].median[i]); \
bytestream2_put_le16(&pb, temp); \
s->w.c[chan].median[i] = wp_exp2(temp); \
} \
} while (0)
put_metadata_block(&pb, WP_ID_ENTROPY, 6 * (1 + (!(s->flags & WV_MONO_DATA))));
WRITE_CHAN_ENTROPY(0);
if (!(s->flags & WV_MONO_DATA))
WRITE_CHAN_ENTROPY(1);
if (s->flags & WV_FLOAT_DATA) {
put_metadata_block(&pb, WP_ID_FLOATINFO, 4);
bytestream2_put_byte(&pb, s->float_flags);
bytestream2_put_byte(&pb, s->float_shift);
bytestream2_put_byte(&pb, s->float_max_exp);
bytestream2_put_byte(&pb, 127);
}
if (s->flags & WV_INT32_DATA) {
put_metadata_block(&pb, WP_ID_INT32INFO, 4);
bytestream2_put_byte(&pb, s->int32_sent_bits);
bytestream2_put_byte(&pb, s->int32_zeros);
bytestream2_put_byte(&pb, s->int32_ones);
bytestream2_put_byte(&pb, s->int32_dups);
}
if (s->flags & WV_MONO_DATA && !s->num_passes) {
for (i = 0; i < nb_samples; i++) {
int32_t code = samples_l[i];
for (tcount = s->num_terms, dpp = s->decorr_passes; tcount--; dpp++) {
int32_t sam;
if (dpp->value > MAX_TERM) {
if (dpp->value & 1)
sam = 2 * dpp->samplesA[0] - dpp->samplesA[1];
else
sam = (3 * dpp->samplesA[0] - dpp->samplesA[1]) >> 1;
dpp->samplesA[1] = dpp->samplesA[0];
dpp->samplesA[0] = code;
} else {
sam = dpp->samplesA[m];
dpp->samplesA[(m + dpp->value) & (MAX_TERM - 1)] = code;
}
code -= APPLY_WEIGHT(dpp->weightA, sam);
UPDATE_WEIGHT(dpp->weightA, dpp->delta, sam, code);
}
m = (m + 1) & (MAX_TERM - 1);
samples_l[i] = code;
}
if (m) {
for (tcount = s->num_terms, dpp = s->decorr_passes; tcount--; dpp++)
if (dpp->value > 0 && dpp->value <= MAX_TERM) {
int32_t temp_A[MAX_TERM], temp_B[MAX_TERM];
int k;
memcpy(temp_A, dpp->samplesA, sizeof(dpp->samplesA));
memcpy(temp_B, dpp->samplesB, sizeof(dpp->samplesB));
for (k = 0; k < MAX_TERM; k++) {
dpp->samplesA[k] = temp_A[m];
dpp->samplesB[k] = temp_B[m];
m = (m + 1) & (MAX_TERM - 1);
}
}
}
} else if (!s->num_passes) {
if (s->flags & WV_JOINT_STEREO) {
for (i = 0; i < nb_samples; i++)
samples_r[i] += ((samples_l[i] -= samples_r[i]) >> 1);
}
for (i = 0; i < s->num_terms; i++) {
struct Decorr *dpp = &s->decorr_passes[i];
if (((s->flags & MAG_MASK) >> MAG_LSB) >= 16 || dpp->delta != 2)
decorr_stereo_pass2(dpp, samples_l, samples_r, nb_samples);
else
decorr_stereo_pass_id2(dpp, samples_l, samples_r, nb_samples);
}
}
bytestream2_put_byte(&pb, WP_ID_DATA | WP_IDF_LONG);
init_put_bits(&s->pb, pb.buffer + 3, bytestream2_get_bytes_left_p(&pb));
if (s->flags & WV_MONO_DATA) {
for (i = 0; i < nb_samples; i++)
wavpack_encode_sample(s, &s->w.c[0], s->samples[0][i]);
} else {
for (i = 0; i < nb_samples; i++) {
wavpack_encode_sample(s, &s->w.c[0], s->samples[0][i]);
wavpack_encode_sample(s, &s->w.c[1], s->samples[1][i]);
}
}
encode_flush(s);
flush_put_bits(&s->pb);
data_size = put_bits_count(&s->pb) >> 3;
bytestream2_put_le24(&pb, (data_size + 1) >> 1);
bytestream2_skip_p(&pb, data_size);
if (data_size & 1)
bytestream2_put_byte(&pb, 0);
if (got_extra) {
bytestream2_put_byte(&pb, WP_ID_EXTRABITS | WP_IDF_LONG);
init_put_bits(&s->pb, pb.buffer + 7, bytestream2_get_bytes_left_p(&pb));
if (s->flags & WV_FLOAT_DATA)
pack_float(s, s->orig_l, s->orig_r, nb_samples);
else
pack_int32(s, s->orig_l, s->orig_r, nb_samples);
flush_put_bits(&s->pb);
data_size = put_bits_count(&s->pb) >> 3;
bytestream2_put_le24(&pb, (data_size + 5) >> 1);
bytestream2_put_le32(&pb, s->crc_x);
bytestream2_skip_p(&pb, data_size);
if (data_size & 1)
bytestream2_put_byte(&pb, 0);
}
block_size = bytestream2_tell_p(&pb);
AV_WL32(out + 4, block_size - 8);
av_assert0(put_bits_left(&s->pb) > 0);
return block_size;
}
| 5,183 |
FFmpeg | 1509c018bd5b054a2354e20021ccbac9c934d213 | 1 | static int mpegts_probe(AVProbeData *p)
{
const int size = p->buf_size;
int score, fec_score, dvhs_score;
int check_count = size / TS_FEC_PACKET_SIZE;
#define CHECK_COUNT 10
if (check_count < CHECK_COUNT)
return AVERROR_INVALIDDATA;
score = analyze(p->buf, TS_PACKET_SIZE * check_count,
TS_PACKET_SIZE, NULL) * CHECK_COUNT / check_count;
dvhs_score = analyze(p->buf, TS_DVHS_PACKET_SIZE * check_count,
TS_DVHS_PACKET_SIZE, NULL) * CHECK_COUNT / check_count;
fec_score = analyze(p->buf, TS_FEC_PACKET_SIZE * check_count,
TS_FEC_PACKET_SIZE, NULL) * CHECK_COUNT / check_count;
av_dlog(NULL, "score: %d, dvhs_score: %d, fec_score: %d \n",
score, dvhs_score, fec_score);
/* we need a clear definition for the returned score otherwise
* things will become messy sooner or later */
if (score > fec_score && score > dvhs_score && score > 6)
return AVPROBE_SCORE_MAX + score - CHECK_COUNT;
else if (dvhs_score > score && dvhs_score > fec_score && dvhs_score > 6)
return AVPROBE_SCORE_MAX + dvhs_score - CHECK_COUNT;
else if (fec_score > 6)
return AVPROBE_SCORE_MAX + fec_score - CHECK_COUNT;
else
return AVERROR_INVALIDDATA;
}
| 5,184 |
FFmpeg | 0be95996d0a07a2f92105da1ed8c13d239c46ad8 | 1 | static inline int l1_unscale(int n, int mant, int scale_factor)
{
int shift, mod;
int64_t val;
shift = scale_factor_modshift[scale_factor];
mod = shift & 3;
shift >>= 2;
val = MUL64(mant + (-1 << n) + 1, scale_factor_mult[n-1][mod]);
shift += n;
/* NOTE: at this point, 1 <= shift >= 21 + 15 */
return (int)((val + (1LL << (shift - 1))) >> shift);
}
| 5,185 |
FFmpeg | 1f5b6c7e1ee604b1525b3ab84ea6e8817fe66f36 | 1 | static int read_low_coeffs(AVCodecContext *avctx, int16_t *dst, int size, int width, ptrdiff_t stride)
{
PixletContext *ctx = avctx->priv_data;
GetBitContext *b = &ctx->gbit;
unsigned cnt1, nbits, k, j = 0, i = 0;
int64_t value, state = 3;
int rlen, escape, flag = 0;
while (i < size) {
nbits = FFMIN(ff_clz((state >> 8) + 3) ^ 0x1F, 14);
cnt1 = get_unary(b, 0, 8);
if (cnt1 < 8) {
value = show_bits(b, nbits);
if (value <= 1) {
skip_bits(b, nbits - 1);
escape = ((1 << nbits) - 1) * cnt1;
} else {
skip_bits(b, nbits);
escape = value + ((1 << nbits) - 1) * cnt1 - 1;
}
} else {
escape = get_bits(b, 16);
}
value = -((escape + flag) & 1) | 1;
dst[j++] = value * ((escape + flag + 1) >> 1);
i++;
if (j == width) {
j = 0;
dst += stride;
}
state = 120 * (escape + flag) + state - (120 * state >> 8);
flag = 0;
if (state * 4 > 0xFF || i >= size)
continue;
nbits = ((state + 8) >> 5) + (state ? ff_clz(state) : 32) - 24;
escape = av_mod_uintp2(16383, nbits);
cnt1 = get_unary(b, 0, 8);
if (cnt1 > 7) {
rlen = get_bits(b, 16);
} else {
value = show_bits(b, nbits);
if (value > 1) {
skip_bits(b, nbits);
rlen = value + escape * cnt1 - 1;
} else {
skip_bits(b, nbits - 1);
rlen = escape * cnt1;
}
}
if (rlen > size - i)
return AVERROR_INVALIDDATA;
i += rlen;
for (k = 0; k < rlen; k++) {
dst[j++] = 0;
if (j == width) {
j = 0;
dst += stride;
}
}
state = 0;
flag = rlen < 0xFFFF ? 1 : 0;
}
align_get_bits(b);
return get_bits_count(b) >> 3;
}
| 5,186 |
qemu | cfb08fbafcd946341bdf14103293887763802697 | 1 | static void fdctrl_raise_irq(FDCtrl *fdctrl, uint8_t status0)
{
/* Sparc mutation */
if (fdctrl->sun4m && (fdctrl->msr & FD_MSR_CMDBUSY)) {
/* XXX: not sure */
fdctrl->msr &= ~FD_MSR_CMDBUSY;
fdctrl->msr |= FD_MSR_RQM | FD_MSR_DIO;
fdctrl->status0 = status0;
return;
}
if (!(fdctrl->sra & FD_SRA_INTPEND)) {
qemu_set_irq(fdctrl->irq, 1);
fdctrl->sra |= FD_SRA_INTPEND;
}
if (status0 & FD_SR0_SEEK) {
FDrive *cur_drv;
/* A seek clears the disk change line (if a disk is inserted) */
cur_drv = get_cur_drv(fdctrl);
if (cur_drv->max_track) {
cur_drv->media_changed = 0;
}
}
fdctrl->reset_sensei = 0;
fdctrl->status0 = status0;
FLOPPY_DPRINTF("Set interrupt status to 0x%02x\n", fdctrl->status0);
}
| 5,187 |
qemu | 7d91ddd25e3a4e5008a2ac16127d51a34fd56bf1 | 1 | static void qemu_net_queue_append_iov(NetQueue *queue,
NetClientState *sender,
unsigned flags,
const struct iovec *iov,
int iovcnt,
NetPacketSent *sent_cb)
{
NetPacket *packet;
size_t max_len = 0;
int i;
if (queue->nq_count >= queue->nq_maxlen && !sent_cb) {
return; /* drop if queue full and no callback */
}
for (i = 0; i < iovcnt; i++) {
max_len += iov[i].iov_len;
}
packet = g_malloc(sizeof(NetPacket) + max_len);
packet->sender = sender;
packet->sent_cb = sent_cb;
packet->flags = flags;
packet->size = 0;
for (i = 0; i < iovcnt; i++) {
size_t len = iov[i].iov_len;
memcpy(packet->data + packet->size, iov[i].iov_base, len);
packet->size += len;
}
QTAILQ_INSERT_TAIL(&queue->packets, packet, entry);
} | 5,188 |
FFmpeg | 80ceb4696ab7b9c40a0e456a866c473a5291d2f2 | 1 | static int ivr_read_header(AVFormatContext *s)
{
unsigned tag, type, len, tlen, value;
int i, j, n, count, nb_streams, ret;
uint8_t key[256], val[256];
AVIOContext *pb = s->pb;
AVStream *st;
int64_t pos, offset, temp;
pos = avio_tell(pb);
tag = avio_rl32(pb);
if (tag == MKTAG('.','R','1','M')) {
if (avio_rb16(pb) != 1)
return AVERROR_INVALIDDATA;
if (avio_r8(pb) != 1)
return AVERROR_INVALIDDATA;
len = avio_rb32(pb);
avio_skip(pb, len);
avio_skip(pb, 5);
temp = avio_rb64(pb);
while (!avio_feof(pb) && temp) {
offset = temp;
temp = avio_rb64(pb);
}
avio_skip(pb, offset - avio_tell(pb));
if (avio_r8(pb) != 1)
return AVERROR_INVALIDDATA;
len = avio_rb32(pb);
avio_skip(pb, len);
if (avio_r8(pb) != 2)
return AVERROR_INVALIDDATA;
avio_skip(pb, 16);
pos = avio_tell(pb);
tag = avio_rl32(pb);
}
if (tag != MKTAG('.','R','E','C'))
return AVERROR_INVALIDDATA;
if (avio_r8(pb) != 0)
return AVERROR_INVALIDDATA;
count = avio_rb32(pb);
for (i = 0; i < count; i++) {
if (avio_feof(pb))
return AVERROR_INVALIDDATA;
type = avio_r8(pb);
tlen = avio_rb32(pb);
avio_get_str(pb, tlen, key, sizeof(key));
len = avio_rb32(pb);
if (type == 5) {
avio_get_str(pb, len, val, sizeof(val));
av_log(s, AV_LOG_DEBUG, "%s = '%s'\n", key, val);
} else if (type == 4) {
av_log(s, AV_LOG_DEBUG, "%s = '0x", key);
for (j = 0; j < len; j++)
av_log(s, AV_LOG_DEBUG, "%X", avio_r8(pb));
av_log(s, AV_LOG_DEBUG, "'\n");
} else if (len == 4 && type == 3 && !strncmp(key, "StreamCount", tlen)) {
nb_streams = value = avio_rb32(pb);
} else if (len == 4 && type == 3) {
value = avio_rb32(pb);
av_log(s, AV_LOG_DEBUG, "%s = %d\n", key, value);
} else {
av_log(s, AV_LOG_DEBUG, "Skipping unsupported key: %s\n", key);
avio_skip(pb, len);
}
}
for (n = 0; n < nb_streams; n++) {
st = avformat_new_stream(s, NULL);
if (!st)
return AVERROR(ENOMEM);
st->priv_data = ff_rm_alloc_rmstream();
if (!st->priv_data)
return AVERROR(ENOMEM);
if (avio_r8(pb) != 1)
return AVERROR_INVALIDDATA;
count = avio_rb32(pb);
for (i = 0; i < count; i++) {
if (avio_feof(pb))
return AVERROR_INVALIDDATA;
type = avio_r8(pb);
tlen = avio_rb32(pb);
avio_get_str(pb, tlen, key, sizeof(key));
len = avio_rb32(pb);
if (type == 5) {
avio_get_str(pb, len, val, sizeof(val));
av_log(s, AV_LOG_DEBUG, "%s = '%s'\n", key, val);
} else if (type == 4 && !strncmp(key, "OpaqueData", tlen)) {
ret = ffio_ensure_seekback(pb, 4);
if (ret < 0)
return ret;
if (avio_rb32(pb) == MKBETAG('M', 'L', 'T', 'I')) {
ret = rm_read_multi(s, pb, st, NULL);
} else {
avio_seek(pb, -4, SEEK_CUR);
ret = ff_rm_read_mdpr_codecdata(s, pb, st, st->priv_data, len, NULL);
}
if (ret < 0)
return ret;
} else if (type == 4) {
int j;
av_log(s, AV_LOG_DEBUG, "%s = '0x", key);
for (j = 0; j < len; j++)
av_log(s, AV_LOG_DEBUG, "%X", avio_r8(pb));
av_log(s, AV_LOG_DEBUG, "'\n");
} else if (len == 4 && type == 3 && !strncmp(key, "Duration", tlen)) {
st->duration = avio_rb32(pb);
} else if (len == 4 && type == 3) {
value = avio_rb32(pb);
av_log(s, AV_LOG_DEBUG, "%s = %d\n", key, value);
} else {
av_log(s, AV_LOG_DEBUG, "Skipping unsupported key: %s\n", key);
avio_skip(pb, len);
}
}
}
if (avio_r8(pb) != 6)
return AVERROR_INVALIDDATA;
avio_skip(pb, 12);
avio_skip(pb, avio_rb64(pb) + pos - avio_tell(s->pb));
if (avio_r8(pb) != 8)
return AVERROR_INVALIDDATA;
avio_skip(pb, 8);
return 0;
}
| 5,189 |
qemu | 0745eb1e4336bf665a911754d18ddd63794b352d | 1 | static int parse_filter(const char *spec, struct USBAutoFilter *f)
{
enum { BUS, DEV, VID, PID, DONE };
const char *p = spec;
int i;
f->bus_num = -1;
f->addr = -1;
f->vendor_id = -1;
f->product_id = -1;
for (i = BUS; i < DONE; i++) {
p = strpbrk(p, ":.");
if (!p) break;
p++;
if (*p == '*')
continue;
switch(i) {
case BUS: f->bus_num = strtol(p, NULL, 10); break;
case DEV: f->addr = strtol(p, NULL, 10); break;
case VID: f->vendor_id = strtol(p, NULL, 16); break;
case PID: f->product_id = strtol(p, NULL, 16); break;
}
}
if (i < DEV) {
fprintf(stderr, "husb: invalid auto filter spec %s\n", spec);
return -1;
}
return 0;
}
| 5,192 |
FFmpeg | ca16618b01abfde44b4eaf92dc89b01aa1b4a91e | 0 | static void inline xan_wc3_build_palette(XanContext *s,
unsigned int *palette_data)
{
int i;
unsigned char r, g, b;
unsigned short *palette16;
unsigned int *palette32;
unsigned int pal_elem;
/* transform the palette passed through the palette control structure
* into the necessary internal format depending on colorspace */
switch (s->avctx->pix_fmt) {
case PIX_FMT_RGB555:
palette16 = (unsigned short *)s->palette;
for (i = 0; i < PALETTE_COUNT; i++) {
pal_elem = palette_data[i];
r = (pal_elem >> 16) & 0xff;
g = (pal_elem >> 8) & 0xff;
b = pal_elem & 0xff;
palette16[i] =
((r >> 3) << 10) |
((g >> 3) << 5) |
((b >> 3) << 0);
}
break;
case PIX_FMT_RGB565:
palette16 = (unsigned short *)s->palette;
for (i = 0; i < PALETTE_COUNT; i++) {
pal_elem = palette_data[i];
r = (pal_elem >> 16) & 0xff;
g = (pal_elem >> 8) & 0xff;
b = pal_elem & 0xff;
palette16[i] =
((r >> 3) << 11) |
((g >> 2) << 5) |
((b >> 3) << 0);
}
break;
case PIX_FMT_RGB24:
for (i = 0; i < PALETTE_COUNT; i++) {
pal_elem = palette_data[i];
r = (pal_elem >> 16) & 0xff;
g = (pal_elem >> 8) & 0xff;
b = pal_elem & 0xff;
s->palette[i * 4 + 0] = r;
s->palette[i * 4 + 1] = g;
s->palette[i * 4 + 2] = b;
}
break;
case PIX_FMT_BGR24:
for (i = 0; i < PALETTE_COUNT; i++) {
pal_elem = palette_data[i];
r = (pal_elem >> 16) & 0xff;
g = (pal_elem >> 8) & 0xff;
b = pal_elem & 0xff;
s->palette[i * 4 + 0] = b;
s->palette[i * 4 + 1] = g;
s->palette[i * 4 + 2] = r;
}
break;
case PIX_FMT_PAL8:
case PIX_FMT_RGBA32:
palette32 = (unsigned int *)s->palette;
memcpy (palette32, palette_data, PALETTE_COUNT * sizeof(unsigned int));
break;
case PIX_FMT_YUV444P:
for (i = 0; i < PALETTE_COUNT; i++) {
pal_elem = palette_data[i];
r = (pal_elem >> 16) & 0xff;
g = (pal_elem >> 8) & 0xff;
b = pal_elem & 0xff;
s->palette[i * 4 + 0] = COMPUTE_Y(r, g, b);
s->palette[i * 4 + 1] = COMPUTE_U(r, g, b);
s->palette[i * 4 + 2] = COMPUTE_V(r, g, b);
}
break;
default:
av_log(s->avctx, AV_LOG_ERROR, " Xan WC3: Unhandled colorspace\n");
break;
}
}
| 5,193 |
FFmpeg | 7f2fe444a39bca733d390b6608801c5f002bfd31 | 0 | int MPV_common_init(MpegEncContext *s)
{
int c_size, i;
UINT8 *pict;
s->dct_unquantize_h263 = dct_unquantize_h263_c;
s->dct_unquantize_mpeg1 = dct_unquantize_mpeg1_c;
s->dct_unquantize_mpeg2 = dct_unquantize_mpeg2_c;
#ifdef HAVE_MMX
MPV_common_init_mmx(s);
#endif
//setup default unquantizers (mpeg4 might change it later)
if(s->out_format == FMT_H263)
s->dct_unquantize = s->dct_unquantize_h263;
else
s->dct_unquantize = s->dct_unquantize_mpeg1;
s->mb_width = (s->width + 15) / 16;
s->mb_height = (s->height + 15) / 16;
s->mb_num = s->mb_width * s->mb_height;
s->linesize = s->mb_width * 16 + 2 * EDGE_WIDTH;
for(i=0;i<3;i++) {
int w, h, shift, pict_start;
w = s->linesize;
h = s->mb_height * 16 + 2 * EDGE_WIDTH;
shift = (i == 0) ? 0 : 1;
c_size = (w >> shift) * (h >> shift);
pict_start = (w >> shift) * (EDGE_WIDTH >> shift) + (EDGE_WIDTH >> shift);
pict = av_mallocz(c_size);
if (pict == NULL)
goto fail;
s->last_picture_base[i] = pict;
s->last_picture[i] = pict + pict_start;
pict = av_mallocz(c_size);
if (pict == NULL)
goto fail;
s->next_picture_base[i] = pict;
s->next_picture[i] = pict + pict_start;
if (s->has_b_frames || s->codec_id==CODEC_ID_MPEG4) {
/* Note the MPEG4 stuff is here cuz of buggy encoders which dont set the low_delay flag but
do low-delay encoding, so we cant allways distinguish b-frame containing streams from low_delay streams */
pict = av_mallocz(c_size);
if (pict == NULL)
goto fail;
s->aux_picture_base[i] = pict;
s->aux_picture[i] = pict + pict_start;
}
}
if (s->encoding) {
int j;
int mv_table_size= (s->mb_width+2)*(s->mb_height+2);
/* Allocate MB type table */
s->mb_type = av_mallocz(s->mb_num * sizeof(char));
if (s->mb_type == NULL) {
perror("malloc");
goto fail;
}
s->mb_var = av_mallocz(s->mb_num * sizeof(INT16));
if (s->mb_var == NULL) {
perror("malloc");
goto fail;
}
/* Allocate MV tables */
s->p_mv_table = av_mallocz(mv_table_size * 2 * sizeof(INT16));
if (s->p_mv_table == NULL) {
perror("malloc");
goto fail;
}
s->last_p_mv_table = av_mallocz(mv_table_size * 2 * sizeof(INT16));
if (s->last_p_mv_table == NULL) {
perror("malloc");
goto fail;
}
s->b_forw_mv_table = av_mallocz(mv_table_size * 2 * sizeof(INT16));
if (s->b_forw_mv_table == NULL) {
perror("malloc");
goto fail;
}
s->b_back_mv_table = av_mallocz(mv_table_size * 2 * sizeof(INT16));
if (s->b_back_mv_table == NULL) {
perror("malloc");
goto fail;
}
s->b_bidir_forw_mv_table = av_mallocz(mv_table_size * 2 * sizeof(INT16));
if (s->b_bidir_forw_mv_table == NULL) {
perror("malloc");
goto fail;
}
s->b_bidir_back_mv_table = av_mallocz(mv_table_size * 2 * sizeof(INT16));
if (s->b_bidir_back_mv_table == NULL) {
perror("malloc");
goto fail;
}
s->b_direct_forw_mv_table = av_mallocz(mv_table_size * 2 * sizeof(INT16));
if (s->b_direct_forw_mv_table == NULL) {
perror("malloc");
goto fail;
}
s->b_direct_back_mv_table = av_mallocz(mv_table_size * 2 * sizeof(INT16));
if (s->b_direct_back_mv_table == NULL) {
perror("malloc");
goto fail;
}
s->b_direct_mv_table = av_mallocz(mv_table_size * 2 * sizeof(INT16));
if (s->b_direct_mv_table == NULL) {
perror("malloc");
goto fail;
}
s->me_scratchpad = av_mallocz( s->linesize*16*3*sizeof(uint8_t));
if (s->me_scratchpad == NULL) {
perror("malloc");
goto fail;
}
if(s->max_b_frames){
for(j=0; j<REORDER_BUFFER_SIZE; j++){
int i;
for(i=0;i<3;i++) {
int w, h, shift;
w = s->linesize;
h = s->mb_height * 16;
shift = (i == 0) ? 0 : 1;
c_size = (w >> shift) * (h >> shift);
pict = av_mallocz(c_size);
if (pict == NULL)
goto fail;
s->picture_buffer[j][i] = pict;
}
}
}
}
if (s->out_format == FMT_H263 || s->encoding) {
int size;
/* MV prediction */
size = (2 * s->mb_width + 2) * (2 * s->mb_height + 2);
s->motion_val = av_malloc(size * 2 * sizeof(INT16));
if (s->motion_val == NULL)
goto fail;
memset(s->motion_val, 0, size * 2 * sizeof(INT16));
}
if (s->h263_pred || s->h263_plus) {
int y_size, c_size, i, size;
/* dc values */
y_size = (2 * s->mb_width + 2) * (2 * s->mb_height + 2);
c_size = (s->mb_width + 2) * (s->mb_height + 2);
size = y_size + 2 * c_size;
s->dc_val[0] = av_malloc(size * sizeof(INT16));
if (s->dc_val[0] == NULL)
goto fail;
s->dc_val[1] = s->dc_val[0] + y_size;
s->dc_val[2] = s->dc_val[1] + c_size;
for(i=0;i<size;i++)
s->dc_val[0][i] = 1024;
/* ac values */
s->ac_val[0] = av_mallocz(size * sizeof(INT16) * 16);
if (s->ac_val[0] == NULL)
goto fail;
s->ac_val[1] = s->ac_val[0] + y_size;
s->ac_val[2] = s->ac_val[1] + c_size;
/* cbp values */
s->coded_block = av_mallocz(y_size);
if (!s->coded_block)
goto fail;
/* which mb is a intra block */
s->mbintra_table = av_mallocz(s->mb_num);
if (!s->mbintra_table)
goto fail;
memset(s->mbintra_table, 1, s->mb_num);
/* divx501 bitstream reorder buffer */
s->bitstream_buffer= av_mallocz(BITSTREAM_BUFFER_SIZE);
if (!s->bitstream_buffer)
goto fail;
}
/* default structure is frame */
s->picture_structure = PICT_FRAME;
/* init macroblock skip table */
s->mbskip_table = av_mallocz(s->mb_num);
if (!s->mbskip_table)
goto fail;
s->block= s->blocks[0];
s->context_initialized = 1;
return 0;
fail:
MPV_common_end(s);
return -1;
}
| 5,194 |