project (string, 2 classes) | commit_id (string, 40 chars) | target (int64, 0 or 1) | func (string, 26 to 142k chars) | idx (int64, 0 to 27.3k)
---|---|---|---|---|
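The five columns above (project, commit_id, target, func, idx) are the whole schema. If this dump mirrors a Hugging Face `datasets`-style export, which is an assumption based only on the dtype annotations in the header, the corpus can be loaded and sanity-checked as in the sketch below; the dataset identifier is a placeholder, not a real path.

```python
# Hedged sketch: assumes a Hugging Face `datasets` export with the columns
# shown in the header above. "user/vuln-funcs" is a placeholder identifier.
from datasets import load_dataset  # pip install datasets

ds = load_dataset("user/vuln-funcs", split="train")

# Features should mirror the header row: project (string), commit_id
# (40-char hex string), target (int64, 0 or 1), func (string), idx (int64).
print(ds.features)

row = ds[0]
print(row["project"], row["commit_id"], row["target"], row["idx"])
print(row["func"][:200])  # first 200 characters of the function body
```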
FFmpeg | 4257b804e2354db07e66ebfd966d7d13f49c7895 | 0 | static int decode_video(InputStream *ist, AVPacket *pkt, int *got_output)
{
AVFrame *decoded_frame, *f;
void *buffer_to_free = NULL;
int i, ret = 0, err = 0, resample_changed;
int64_t best_effort_timestamp;
AVRational *frame_sample_aspect;
if (!ist->decoded_frame && !(ist->decoded_frame = av_frame_alloc()))
return AVERROR(ENOMEM);
if (!ist->filter_frame && !(ist->filter_frame = av_frame_alloc()))
return AVERROR(ENOMEM);
decoded_frame = ist->decoded_frame;
pkt->dts = av_rescale_q(ist->dts, AV_TIME_BASE_Q, ist->st->time_base);
update_benchmark(NULL);
ret = avcodec_decode_video2(ist->st->codec,
decoded_frame, got_output, pkt);
update_benchmark("decode_video %d.%d", ist->file_index, ist->st->index);
if (!*got_output || ret < 0) {
if (!pkt->size) {
for (i = 0; i < ist->nb_filters; i++)
#if 1
av_buffersrc_add_ref(ist->filters[i]->filter, NULL, 0);
#else
av_buffersrc_add_frame(ist->filters[i]->filter, NULL);
#endif
}
return ret;
}
if(ist->top_field_first>=0)
decoded_frame->top_field_first = ist->top_field_first;
best_effort_timestamp= av_frame_get_best_effort_timestamp(decoded_frame);
if(best_effort_timestamp != AV_NOPTS_VALUE)
ist->next_pts = ist->pts = av_rescale_q(decoded_frame->pts = best_effort_timestamp, ist->st->time_base, AV_TIME_BASE_Q);
if (debug_ts) {
av_log(NULL, AV_LOG_INFO, "decoder -> ist_index:%d type:video "
"frame_pts:%s frame_pts_time:%s best_effort_ts:%"PRId64" best_effort_ts_time:%s keyframe:%d frame_type:%d \n",
ist->st->index, av_ts2str(decoded_frame->pts),
av_ts2timestr(decoded_frame->pts, &ist->st->time_base),
best_effort_timestamp,
av_ts2timestr(best_effort_timestamp, &ist->st->time_base),
decoded_frame->key_frame, decoded_frame->pict_type);
}
pkt->size = 0;
#if FF_API_DEINTERLACE
pre_process_video_frame(ist, (AVPicture *)decoded_frame, &buffer_to_free);
#endif
rate_emu_sleep(ist);
if (ist->st->sample_aspect_ratio.num)
decoded_frame->sample_aspect_ratio = ist->st->sample_aspect_ratio;
resample_changed = ist->resample_width != decoded_frame->width ||
ist->resample_height != decoded_frame->height ||
ist->resample_pix_fmt != decoded_frame->format;
if (resample_changed) {
av_log(NULL, AV_LOG_INFO,
"Input stream #%d:%d frame changed from size:%dx%d fmt:%s to size:%dx%d fmt:%s\n",
ist->file_index, ist->st->index,
ist->resample_width, ist->resample_height, av_get_pix_fmt_name(ist->resample_pix_fmt),
decoded_frame->width, decoded_frame->height, av_get_pix_fmt_name(decoded_frame->format));
ist->resample_width = decoded_frame->width;
ist->resample_height = decoded_frame->height;
ist->resample_pix_fmt = decoded_frame->format;
for (i = 0; i < nb_filtergraphs; i++) {
if (ist_in_filtergraph(filtergraphs[i], ist) && ist->reinit_filters &&
configure_filtergraph(filtergraphs[i]) < 0) {
av_log(NULL, AV_LOG_FATAL, "Error reinitializing filters!\n");
exit(1);
}
}
}
frame_sample_aspect= av_opt_ptr(avcodec_get_frame_class(), decoded_frame, "sample_aspect_ratio");
for (i = 0; i < ist->nb_filters; i++) {
if (!frame_sample_aspect->num)
*frame_sample_aspect = ist->st->sample_aspect_ratio;
if (i < ist->nb_filters - 1) {
f = ist->filter_frame;
err = av_frame_ref(f, decoded_frame);
if (err < 0)
break;
} else
f = decoded_frame;
if(av_buffersrc_add_frame_flags(ist->filters[i]->filter, f,
AV_BUFFERSRC_FLAG_PUSH)<0) {
av_log(NULL, AV_LOG_FATAL, "Failed to inject frame into filter network\n");
exit(1);
}
}
av_frame_unref(ist->filter_frame);
av_frame_unref(decoded_frame);
av_free(buffer_to_free);
return err < 0 ? err : ret;
}
| 4,680 |
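Each record in this dump follows the same layout as the row that just ended: a `project | commit_id | target |` prefix, a multi-line func cell, and a closing `| idx |` line. Below is a minimal sketch for splitting the raw text of this file back into records, assuming that layout holds throughout; a row cut off at the end of the excerpt is simply skipped.

```python
# Hedged sketch: parses this flattened dump, not the original dataset files.
# Assumes every record starts with "<project> | <40-hex commit> | <0|1> | "
# and ends with a "| <idx> |" line; anything not matching is skipped.
import re

ROW_START = re.compile(r"^(FFmpeg|qemu) \| ([0-9a-f]{40}) \| ([01]) \| ", re.M)
ROW_END = re.compile(r"\n\| ([\d,]+) \|\s*$")

def parse_dump(text):
    records = []
    starts = list(ROW_START.finditer(text))
    for i, m in enumerate(starts):
        end = starts[i + 1].start() if i + 1 < len(starts) else len(text)
        body = text[m.end():end]
        tail = ROW_END.search(body)
        if tail is None:  # e.g. a truncated final row
            continue
        records.append({
            "project": m.group(1),
            "commit_id": m.group(2),
            "target": int(m.group(3)),
            "func": body[:tail.start()],
            "idx": int(tail.group(1).replace(",", "")),
        })
    return records
```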
FFmpeg | b2a4316287ea814168b0b794bd7ab0063fd1dd0f | 0 | static void audio_encode_example(const char *filename)
{
AVCodec *codec;
AVCodecContext *c= NULL;
AVFrame *frame;
AVPacket pkt;
int i, j, k, ret, got_output;
int buffer_size;
FILE *f;
uint16_t *samples;
float t, tincr;
printf("Encode audio file %s\n", filename);
/* find the MP2 encoder */
codec = avcodec_find_encoder(AV_CODEC_ID_MP2);
if (!codec) {
fprintf(stderr, "Codec not found\n");
exit(1);
}
c = avcodec_alloc_context3(codec);
if (!c) {
fprintf(stderr, "Could not allocate audio codec context\n");
exit(1);
}
/* put sample parameters */
c->bit_rate = 64000;
/* check that the encoder supports s16 pcm input */
c->sample_fmt = AV_SAMPLE_FMT_S16;
if (!check_sample_fmt(codec, c->sample_fmt)) {
fprintf(stderr, "Encoder does not support sample format %s",
av_get_sample_fmt_name(c->sample_fmt));
exit(1);
}
/* select other audio parameters supported by the encoder */
c->sample_rate = select_sample_rate(codec);
c->channel_layout = select_channel_layout(codec);
c->channels = av_get_channel_layout_nb_channels(c->channel_layout);
/* open it */
if (avcodec_open2(c, codec, NULL) < 0) {
fprintf(stderr, "Could not open codec\n");
exit(1);
}
f = fopen(filename, "wb");
if (!f) {
fprintf(stderr, "Could not open %s\n", filename);
exit(1);
}
/* frame containing input raw audio */
frame = av_frame_alloc();
if (!frame) {
fprintf(stderr, "Could not allocate audio frame\n");
exit(1);
}
frame->nb_samples = c->frame_size;
frame->format = c->sample_fmt;
frame->channel_layout = c->channel_layout;
/* the codec gives us the frame size, in samples,
* we calculate the size of the samples buffer in bytes */
buffer_size = av_samples_get_buffer_size(NULL, c->channels, c->frame_size,
c->sample_fmt, 0);
if (buffer_size < 0) {
fprintf(stderr, "Could not get sample buffer size\n");
exit(1);
}
samples = av_malloc(buffer_size);
if (!samples) {
fprintf(stderr, "Could not allocate %d bytes for samples buffer\n",
buffer_size);
exit(1);
}
/* setup the data pointers in the AVFrame */
ret = avcodec_fill_audio_frame(frame, c->channels, c->sample_fmt,
(const uint8_t*)samples, buffer_size, 0);
if (ret < 0) {
fprintf(stderr, "Could not setup audio frame\n");
exit(1);
}
/* encode a single tone sound */
t = 0;
tincr = 2 * M_PI * 440.0 / c->sample_rate;
for(i=0;i<200;i++) {
av_init_packet(&pkt);
pkt.data = NULL; // packet data will be allocated by the encoder
pkt.size = 0;
for (j = 0; j < c->frame_size; j++) {
samples[2*j] = (int)(sin(t) * 10000);
for (k = 1; k < c->channels; k++)
samples[2*j + k] = samples[2*j];
t += tincr;
}
/* encode the samples */
ret = avcodec_encode_audio2(c, &pkt, frame, &got_output);
if (ret < 0) {
fprintf(stderr, "Error encoding audio frame\n");
exit(1);
}
if (got_output) {
fwrite(pkt.data, 1, pkt.size, f);
av_free_packet(&pkt);
}
}
/* get the delayed frames */
for (got_output = 1; got_output; i++) {
ret = avcodec_encode_audio2(c, &pkt, NULL, &got_output);
if (ret < 0) {
fprintf(stderr, "Error encoding frame\n");
exit(1);
}
if (got_output) {
fwrite(pkt.data, 1, pkt.size, f);
av_free_packet(&pkt);
}
}
fclose(f);
av_freep(&samples);
av_frame_free(&frame);
avcodec_close(c);
av_free(c);
}
| 4,682 |
FFmpeg | e46a6fb7732a7caef97a916a4f765ec0f779d195 | 0 | static void avconv_cleanup(int ret)
{
int i, j;
for (i = 0; i < nb_filtergraphs; i++) {
FilterGraph *fg = filtergraphs[i];
avfilter_graph_free(&fg->graph);
for (j = 0; j < fg->nb_inputs; j++) {
while (av_fifo_size(fg->inputs[j]->frame_queue)) {
AVFrame *frame;
av_fifo_generic_read(fg->inputs[j]->frame_queue, &frame,
sizeof(frame), NULL);
av_frame_free(&frame);
}
av_fifo_free(fg->inputs[j]->frame_queue);
av_buffer_unref(&fg->inputs[j]->hw_frames_ctx);
av_freep(&fg->inputs[j]->name);
av_freep(&fg->inputs[j]);
}
av_freep(&fg->inputs);
for (j = 0; j < fg->nb_outputs; j++) {
av_freep(&fg->outputs[j]->name);
av_freep(&fg->outputs[j]->formats);
av_freep(&fg->outputs[j]->channel_layouts);
av_freep(&fg->outputs[j]->sample_rates);
av_freep(&fg->outputs[j]);
}
av_freep(&fg->outputs);
av_freep(&fg->graph_desc);
av_freep(&filtergraphs[i]);
}
av_freep(&filtergraphs);
/* close files */
for (i = 0; i < nb_output_files; i++) {
OutputFile *of = output_files[i];
AVFormatContext *s = of->ctx;
if (s && s->oformat && !(s->oformat->flags & AVFMT_NOFILE) && s->pb)
avio_close(s->pb);
avformat_free_context(s);
av_dict_free(&of->opts);
av_freep(&output_files[i]);
}
for (i = 0; i < nb_output_streams; i++) {
OutputStream *ost = output_streams[i];
for (j = 0; j < ost->nb_bitstream_filters; j++)
av_bsf_free(&ost->bsf_ctx[j]);
av_freep(&ost->bsf_ctx);
av_freep(&ost->bitstream_filters);
av_frame_free(&ost->filtered_frame);
av_parser_close(ost->parser);
avcodec_free_context(&ost->parser_avctx);
av_freep(&ost->forced_keyframes);
av_freep(&ost->avfilter);
av_freep(&ost->logfile_prefix);
avcodec_free_context(&ost->enc_ctx);
while (av_fifo_size(ost->muxing_queue)) {
AVPacket pkt;
av_fifo_generic_read(ost->muxing_queue, &pkt, sizeof(pkt), NULL);
av_packet_unref(&pkt);
}
av_fifo_free(ost->muxing_queue);
av_freep(&output_streams[i]);
}
for (i = 0; i < nb_input_files; i++) {
avformat_close_input(&input_files[i]->ctx);
av_freep(&input_files[i]);
}
for (i = 0; i < nb_input_streams; i++) {
InputStream *ist = input_streams[i];
av_frame_free(&ist->decoded_frame);
av_frame_free(&ist->filter_frame);
av_dict_free(&ist->decoder_opts);
av_freep(&ist->filters);
av_freep(&ist->hwaccel_device);
avcodec_free_context(&ist->dec_ctx);
av_freep(&input_streams[i]);
}
if (vstats_file)
fclose(vstats_file);
av_free(vstats_filename);
av_freep(&input_streams);
av_freep(&input_files);
av_freep(&output_streams);
av_freep(&output_files);
uninit_opts();
avformat_network_deinit();
if (received_sigterm) {
av_log(NULL, AV_LOG_INFO, "Received signal %d: terminating.\n",
(int) received_sigterm);
exit (255);
}
}
| 4,683 |
FFmpeg | 5fba300d02f693de3c741e07740a851b2b3a94c7 | 0 | static void dump_stream_format(AVFormatContext *ic, int i, int index, int is_output)
{
char buf[256];
int flags = (is_output ? ic->oformat->flags : ic->iformat->flags);
AVStream *st = ic->streams[i];
int g = ff_gcd(st->time_base.num, st->time_base.den);
avcodec_string(buf, sizeof(buf), st->codec, is_output);
av_log(NULL, AV_LOG_INFO, " Stream #%d.%d", index, i);
/* the pid is an important information, so we display it */
/* XXX: add a generic system */
if (flags & AVFMT_SHOW_IDS)
av_log(NULL, AV_LOG_INFO, "[0x%x]", st->id);
if (strlen(st->language) > 0)
av_log(NULL, AV_LOG_INFO, "(%s)", st->language);
av_log(NULL, AV_LOG_DEBUG, ", %d/%d", st->time_base.num/g, st->time_base.den/g);
av_log(NULL, AV_LOG_INFO, ": %s", buf);
if(st->codec->codec_type == CODEC_TYPE_VIDEO){
if(st->r_frame_rate.den && st->r_frame_rate.num)
av_log(NULL, AV_LOG_INFO, ", %5.2f fps(r)", av_q2d(st->r_frame_rate));
/* else if(st->time_base.den && st->time_base.num)
av_log(NULL, AV_LOG_INFO, ", %5.2f fps(m)", 1/av_q2d(st->time_base));*/
else
av_log(NULL, AV_LOG_INFO, ", %5.2f fps(c)", 1/av_q2d(st->codec->time_base));
}
av_log(NULL, AV_LOG_INFO, "\n");
}
| 4,684 |
FFmpeg | 3faa303a47e0c3b59a53988e0f76018930c6cb1a | 0 | static inline void rv34_decode_block(DCTELEM *dst, GetBitContext *gb, RV34VLC *rvlc, int fc, int sc, int q_dc, int q_ac1, int q_ac2)
{
int code, pattern;
code = get_vlc2(gb, rvlc->first_pattern[fc].table, 9, 2);
pattern = code & 0x7;
code >>= 3;
decode_subblock3(dst, code, 0, gb, &rvlc->coefficient, q_dc, q_ac1, q_ac2);
if(pattern & 4){
code = get_vlc2(gb, rvlc->second_pattern[sc].table, 9, 2);
decode_subblock(dst + 2, code, 0, gb, &rvlc->coefficient, q_ac2);
}
if(pattern & 2){ // Looks like coefficients 1 and 2 are swapped for this block
code = get_vlc2(gb, rvlc->second_pattern[sc].table, 9, 2);
decode_subblock(dst + 8*2, code, 1, gb, &rvlc->coefficient, q_ac2);
}
if(pattern & 1){
code = get_vlc2(gb, rvlc->third_pattern[sc].table, 9, 2);
decode_subblock(dst + 8*2+2, code, 0, gb, &rvlc->coefficient, q_ac2);
}
}
| 4,687 |
qemu | 03a96b83b539498510e22aab585e41015ba18247 | 1 | static int kvm_set_ioeventfd_mmio(int fd, hwaddr addr, uint32_t val,
bool assign, uint32_t size, bool datamatch)
{
int ret;
struct kvm_ioeventfd iofd;
iofd.datamatch = datamatch ? adjust_ioeventfd_endianness(val, size) : 0;
iofd.addr = addr;
iofd.len = size;
iofd.flags = 0;
iofd.fd = fd;
if (!kvm_enabled()) {
return -ENOSYS;
}
if (datamatch) {
iofd.flags |= KVM_IOEVENTFD_FLAG_DATAMATCH;
}
if (!assign) {
iofd.flags |= KVM_IOEVENTFD_FLAG_DEASSIGN;
}
ret = kvm_vm_ioctl(kvm_state, KVM_IOEVENTFD, &iofd);
if (ret < 0) {
return -errno;
}
return 0;
}
| 4,688 |
FFmpeg | 5804201cbac2de8824013a8294e381e93bbe45f2 | 1 | AVFrameSideData *av_frame_new_side_data(AVFrame *frame,
enum AVFrameSideDataType type,
int size)
{
AVFrameSideData *ret, **tmp;
if (frame->nb_side_data > INT_MAX / sizeof(*frame->side_data) - 1)
return NULL;
tmp = av_realloc(frame->side_data,
(frame->nb_side_data + 1) * sizeof(*frame->side_data));
if (!tmp)
return NULL;
frame->side_data = tmp;
ret = av_mallocz(sizeof(*ret));
if (!ret)
return NULL;
if (size > 0) {
ret->buf = av_buffer_alloc(size);
if (!ret->buf) {
av_freep(&ret);
return NULL;
}
ret->data = ret->buf->data;
ret->size = size;
}
ret->type = type;
frame->side_data[frame->nb_side_data++] = ret;
return ret;
}
| 4,689 |
qemu | 3cb0e25c4b417b7336816bd92de458f0770d49ff | 1 | DriveInfo *drive_init(QemuOpts *all_opts, BlockInterfaceType block_default_type)
{
const char *value;
DriveInfo *dinfo = NULL;
QDict *bs_opts;
QemuOpts *legacy_opts;
DriveMediaType media = MEDIA_DISK;
BlockInterfaceType type;
int cyls, heads, secs, translation;
int max_devs, bus_id, unit_id, index;
const char *devaddr;
const char *werror, *rerror;
bool read_only = false;
bool copy_on_read;
const char *filename;
Error *local_err = NULL;
/* Change legacy command line options into QMP ones */
qemu_opt_rename(all_opts, "iops", "throttling.iops-total");
qemu_opt_rename(all_opts, "iops_rd", "throttling.iops-read");
qemu_opt_rename(all_opts, "iops_wr", "throttling.iops-write");
qemu_opt_rename(all_opts, "bps", "throttling.bps-total");
qemu_opt_rename(all_opts, "bps_rd", "throttling.bps-read");
qemu_opt_rename(all_opts, "bps_wr", "throttling.bps-write");
qemu_opt_rename(all_opts, "iops_max", "throttling.iops-total-max");
qemu_opt_rename(all_opts, "iops_rd_max", "throttling.iops-read-max");
qemu_opt_rename(all_opts, "iops_wr_max", "throttling.iops-write-max");
qemu_opt_rename(all_opts, "bps_max", "throttling.bps-total-max");
qemu_opt_rename(all_opts, "bps_rd_max", "throttling.bps-read-max");
qemu_opt_rename(all_opts, "bps_wr_max", "throttling.bps-write-max");
qemu_opt_rename(all_opts,
"iops_size", "throttling.iops-size");
qemu_opt_rename(all_opts, "readonly", "read-only");
value = qemu_opt_get(all_opts, "cache");
if (value) {
int flags = 0;
if (bdrv_parse_cache_flags(value, &flags) != 0) {
error_report("invalid cache option");
return NULL;
}
/* Specific options take precedence */
if (!qemu_opt_get(all_opts, "cache.writeback")) {
qemu_opt_set_bool(all_opts, "cache.writeback",
!!(flags & BDRV_O_CACHE_WB));
}
if (!qemu_opt_get(all_opts, "cache.direct")) {
qemu_opt_set_bool(all_opts, "cache.direct",
!!(flags & BDRV_O_NOCACHE));
}
if (!qemu_opt_get(all_opts, "cache.no-flush")) {
qemu_opt_set_bool(all_opts, "cache.no-flush",
!!(flags & BDRV_O_NO_FLUSH));
}
qemu_opt_unset(all_opts, "cache");
}
/* Get a QDict for processing the options */
bs_opts = qdict_new();
qemu_opts_to_qdict(all_opts, bs_opts);
legacy_opts = qemu_opts_create(&qemu_legacy_drive_opts, NULL, 0,
&error_abort);
qemu_opts_absorb_qdict(legacy_opts, bs_opts, &local_err);
if (local_err) {
error_report("%s", error_get_pretty(local_err));
error_free(local_err);
goto fail;
}
/* Deprecated option boot=[on|off] */
if (qemu_opt_get(legacy_opts, "boot") != NULL) {
fprintf(stderr, "qemu-kvm: boot=on|off is deprecated and will be "
"ignored. Future versions will reject this parameter. Please "
"update your scripts.\n");
}
/* Media type */
value = qemu_opt_get(legacy_opts, "media");
if (value) {
if (!strcmp(value, "disk")) {
media = MEDIA_DISK;
} else if (!strcmp(value, "cdrom")) {
media = MEDIA_CDROM;
read_only = true;
} else {
error_report("'%s' invalid media", value);
goto fail;
}
}
/* copy-on-read is disabled with a warning for read-only devices */
read_only |= qemu_opt_get_bool(legacy_opts, "read-only", false);
copy_on_read = qemu_opt_get_bool(legacy_opts, "copy-on-read", false);
if (read_only && copy_on_read) {
error_report("warning: disabling copy-on-read on read-only drive");
copy_on_read = false;
}
qdict_put(bs_opts, "read-only",
qstring_from_str(read_only ? "on" : "off"));
qdict_put(bs_opts, "copy-on-read",
qstring_from_str(copy_on_read ? "on" :"off"));
/* Controller type */
value = qemu_opt_get(legacy_opts, "if");
if (value) {
for (type = 0;
type < IF_COUNT && strcmp(value, if_name[type]);
type++) {
}
if (type == IF_COUNT) {
error_report("unsupported bus type '%s'", value);
goto fail;
}
} else {
type = block_default_type;
}
/* Geometry */
cyls = qemu_opt_get_number(legacy_opts, "cyls", 0);
heads = qemu_opt_get_number(legacy_opts, "heads", 0);
secs = qemu_opt_get_number(legacy_opts, "secs", 0);
if (cyls || heads || secs) {
if (cyls < 1) {
error_report("invalid physical cyls number");
goto fail;
}
if (heads < 1) {
error_report("invalid physical heads number");
goto fail;
}
if (secs < 1) {
error_report("invalid physical secs number");
goto fail;
}
}
translation = BIOS_ATA_TRANSLATION_AUTO;
value = qemu_opt_get(legacy_opts, "trans");
if (value != NULL) {
if (!cyls) {
error_report("'%s' trans must be used with cyls, heads and secs",
value);
goto fail;
}
if (!strcmp(value, "none")) {
translation = BIOS_ATA_TRANSLATION_NONE;
} else if (!strcmp(value, "lba")) {
translation = BIOS_ATA_TRANSLATION_LBA;
} else if (!strcmp(value, "large")) {
translation = BIOS_ATA_TRANSLATION_LARGE;
} else if (!strcmp(value, "rechs")) {
translation = BIOS_ATA_TRANSLATION_RECHS;
} else if (!strcmp(value, "auto")) {
translation = BIOS_ATA_TRANSLATION_AUTO;
} else {
error_report("'%s' invalid translation type", value);
goto fail;
}
}
if (media == MEDIA_CDROM) {
if (cyls || secs || heads) {
error_report("CHS can't be set with media=cdrom");
goto fail;
}
}
/* Device address specified by bus/unit or index.
* If none was specified, try to find the first free one. */
bus_id = qemu_opt_get_number(legacy_opts, "bus", 0);
unit_id = qemu_opt_get_number(legacy_opts, "unit", -1);
index = qemu_opt_get_number(legacy_opts, "index", -1);
max_devs = if_max_devs[type];
if (index != -1) {
if (bus_id != 0 || unit_id != -1) {
error_report("index cannot be used with bus and unit");
goto fail;
}
bus_id = drive_index_to_bus_id(type, index);
unit_id = drive_index_to_unit_id(type, index);
}
if (unit_id == -1) {
unit_id = 0;
while (drive_get(type, bus_id, unit_id) != NULL) {
unit_id++;
if (max_devs && unit_id >= max_devs) {
unit_id -= max_devs;
bus_id++;
}
}
}
if (max_devs && unit_id >= max_devs) {
error_report("unit %d too big (max is %d)", unit_id, max_devs - 1);
goto fail;
}
if (drive_get(type, bus_id, unit_id) != NULL) {
error_report("drive with bus=%d, unit=%d (index=%d) exists",
bus_id, unit_id, index);
goto fail;
}
/* no id supplied -> create one */
if (qemu_opts_id(all_opts) == NULL) {
char *new_id;
const char *mediastr = "";
if (type == IF_IDE || type == IF_SCSI) {
mediastr = (media == MEDIA_CDROM) ? "-cd" : "-hd";
}
if (max_devs) {
new_id = g_strdup_printf("%s%i%s%i", if_name[type], bus_id,
mediastr, unit_id);
} else {
new_id = g_strdup_printf("%s%s%i", if_name[type],
mediastr, unit_id);
}
qdict_put(bs_opts, "id", qstring_from_str(new_id));
g_free(new_id);
}
/* Add virtio block device */
devaddr = qemu_opt_get(legacy_opts, "addr");
if (devaddr && type != IF_VIRTIO) {
error_report("addr is not supported by this bus type");
goto fail;
}
if (type == IF_VIRTIO) {
QemuOpts *devopts;
devopts = qemu_opts_create(qemu_find_opts("device"), NULL, 0,
&error_abort);
if (arch_type == QEMU_ARCH_S390X) {
qemu_opt_set(devopts, "driver", "virtio-blk-s390");
} else {
qemu_opt_set(devopts, "driver", "virtio-blk-pci");
}
qemu_opt_set(devopts, "drive", qdict_get_str(bs_opts, "id"));
if (devaddr) {
qemu_opt_set(devopts, "addr", devaddr);
}
}
filename = qemu_opt_get(legacy_opts, "file");
/* Check werror/rerror compatibility with if=... */
werror = qemu_opt_get(legacy_opts, "werror");
if (werror != NULL) {
if (type != IF_IDE && type != IF_SCSI && type != IF_VIRTIO &&
type != IF_NONE) {
error_report("werror is not supported by this bus type");
goto fail;
}
qdict_put(bs_opts, "werror", qstring_from_str(werror));
}
rerror = qemu_opt_get(legacy_opts, "rerror");
if (rerror != NULL) {
if (type != IF_IDE && type != IF_VIRTIO && type != IF_SCSI &&
type != IF_NONE) {
error_report("rerror is not supported by this bus type");
goto fail;
}
qdict_put(bs_opts, "rerror", qstring_from_str(rerror));
}
/* Actual block device init: Functionality shared with blockdev-add */
dinfo = blockdev_init(filename, bs_opts, &local_err);
if (dinfo == NULL) {
if (local_err) {
error_report("%s", error_get_pretty(local_err));
error_free(local_err);
}
goto fail;
} else {
assert(!local_err);
}
/* Set legacy DriveInfo fields */
dinfo->enable_auto_del = true;
dinfo->opts = all_opts;
dinfo->cyls = cyls;
dinfo->heads = heads;
dinfo->secs = secs;
dinfo->trans = translation;
dinfo->type = type;
dinfo->bus = bus_id;
dinfo->unit = unit_id;
dinfo->devaddr = devaddr;
switch(type) {
case IF_IDE:
case IF_SCSI:
case IF_XEN:
case IF_NONE:
dinfo->media_cd = media == MEDIA_CDROM;
break;
default:
break;
}
fail:
qemu_opts_del(legacy_opts);
QDECREF(bs_opts);
return dinfo;
} | 4,690 |
qemu | 8a6a80896d6af03b8ee0c17cdf37219eca2588a7 | 1 | static int connect_to_ssh(BDRVSSHState *s, QDict *options,
int ssh_flags, int creat_mode, Error **errp)
{
int r, ret;
const char *host, *user, *path, *host_key_check;
int port;
if (!qdict_haskey(options, "host")) {
ret = -EINVAL;
error_setg(errp, "No hostname was specified");
goto err;
}
host = qdict_get_str(options, "host");
if (qdict_haskey(options, "port")) {
port = qdict_get_int(options, "port");
} else {
port = 22;
}
if (!qdict_haskey(options, "path")) {
ret = -EINVAL;
error_setg(errp, "No path was specified");
goto err;
}
path = qdict_get_str(options, "path");
if (qdict_haskey(options, "user")) {
user = qdict_get_str(options, "user");
} else {
user = g_get_user_name();
if (!user) {
error_setg_errno(errp, errno, "Can't get user name");
ret = -errno;
goto err;
}
}
if (qdict_haskey(options, "host_key_check")) {
host_key_check = qdict_get_str(options, "host_key_check");
} else {
host_key_check = "yes";
}
/* Construct the host:port name for inet_connect. */
g_free(s->hostport);
s->hostport = g_strdup_printf("%s:%d", host, port);
/* Open the socket and connect. */
s->sock = inet_connect(s->hostport, errp);
if (s->sock < 0) {
ret = -EIO;
goto err;
}
/* Create SSH session. */
s->session = libssh2_session_init();
if (!s->session) {
ret = -EINVAL;
session_error_setg(errp, s, "failed to initialize libssh2 session");
goto err;
}
#if TRACE_LIBSSH2 != 0
libssh2_trace(s->session, TRACE_LIBSSH2);
#endif
r = libssh2_session_handshake(s->session, s->sock);
if (r != 0) {
ret = -EINVAL;
session_error_setg(errp, s, "failed to establish SSH session");
goto err;
}
/* Check the remote host's key against known_hosts. */
ret = check_host_key(s, host, port, host_key_check, errp);
if (ret < 0) {
goto err;
}
/* Authenticate. */
ret = authenticate(s, user, errp);
if (ret < 0) {
goto err;
}
/* Start SFTP. */
s->sftp = libssh2_sftp_init(s->session);
if (!s->sftp) {
session_error_setg(errp, s, "failed to initialize sftp handle");
ret = -EINVAL;
goto err;
}
/* Open the remote file. */
DPRINTF("opening file %s flags=0x%x creat_mode=0%o",
path, ssh_flags, creat_mode);
s->sftp_handle = libssh2_sftp_open(s->sftp, path, ssh_flags, creat_mode);
if (!s->sftp_handle) {
session_error_setg(errp, s, "failed to open remote file '%s'", path);
ret = -EINVAL;
goto err;
}
r = libssh2_sftp_fstat(s->sftp_handle, &s->attrs);
if (r < 0) {
sftp_error_setg(errp, s, "failed to read file attributes");
return -EINVAL;
}
/* Delete the options we've used; any not deleted will cause the
* block layer to give an error about unused options.
*/
qdict_del(options, "host");
qdict_del(options, "port");
qdict_del(options, "user");
qdict_del(options, "path");
qdict_del(options, "host_key_check");
return 0;
err:
if (s->sftp_handle) {
libssh2_sftp_close(s->sftp_handle);
}
s->sftp_handle = NULL;
if (s->sftp) {
libssh2_sftp_shutdown(s->sftp);
}
s->sftp = NULL;
if (s->session) {
libssh2_session_disconnect(s->session,
"from qemu ssh client: "
"error opening connection");
libssh2_session_free(s->session);
}
s->session = NULL;
return ret;
}
| 4,691 |
FFmpeg | 130a8e0eef2f81e0d853117e417b650c3e16d1b7 | 1 | void ff_configure_buffers_for_index(AVFormatContext *s, int64_t time_tolerance)
{
int ist1, ist2;
int64_t pos_delta = 0;
//We could use URLProtocol flags here but as many user applications do not use URLProtocols this would be unreliable
const char *proto = avio_find_protocol_name(s->filename);
if (!(strcmp(proto, "file") && strcmp(proto, "pipe") && strcmp(proto, "cache")))
return;
for (ist1 = 0; ist1 < s->nb_streams; ist1++) {
AVStream *st1 = s->streams[ist1];
for (ist2 = 0; ist2 < s->nb_streams; ist2++) {
AVStream *st2 = s->streams[ist2];
int i1, i2;
if (ist1 == ist2)
continue;
for (i1 = i2 = 0; i1 < st1->nb_index_entries; i1++) {
AVIndexEntry *e1 = &st1->index_entries[i1];
int64_t e1_pts = av_rescale_q(e1->timestamp, st1->time_base, AV_TIME_BASE_Q);
for (; i2 < st2->nb_index_entries; i2++) {
AVIndexEntry *e2 = &st2->index_entries[i2];
int64_t e2_pts = av_rescale_q(e2->timestamp, st2->time_base, AV_TIME_BASE_Q);
if (e2_pts - e1_pts < time_tolerance)
continue;
pos_delta = FFMAX(pos_delta, e1->pos - e2->pos);
break;
}
}
}
}
pos_delta *= 2;
/* XXX This could be adjusted depending on protocol*/
if (s->pb->buffer_size < pos_delta && pos_delta < (1<<24)) {
av_log(s, AV_LOG_VERBOSE, "Reconfiguring buffers to size %"PRId64"\n", pos_delta);
ffio_set_buf_size(s->pb, pos_delta);
s->pb->short_seek_threshold = FFMAX(s->pb->short_seek_threshold, pos_delta/2);
}
}
| 4,692 |
FFmpeg | b42bcaef29e6ffcd9513e1ad92dbff07bea84c94 | 1 | int attribute_align_arg avcodec_decode_audio4(AVCodecContext *avctx,
AVFrame *frame,
int *got_frame_ptr,
const AVPacket *avpkt)
{
AVCodecInternal *avci = avctx->internal;
int planar, channels;
int ret = 0;
*got_frame_ptr = 0;
if (!avpkt->data && avpkt->size) {
av_log(avctx, AV_LOG_ERROR, "invalid packet: NULL data, size != 0\n");
}
if (avctx->codec->type != AVMEDIA_TYPE_AUDIO) {
av_log(avctx, AV_LOG_ERROR, "Invalid media type for audio\n");
}
avcodec_get_frame_defaults(frame);
if (!avctx->refcounted_frames)
av_frame_unref(&avci->to_free);
if ((avctx->codec->capabilities & CODEC_CAP_DELAY) || avpkt->size || (avctx->active_thread_type & FF_THREAD_FRAME)) {
uint8_t *side;
int side_size;
// copy to ensure we do not change avpkt
AVPacket tmp = *avpkt;
int did_split = av_packet_split_side_data(&tmp);
apply_param_change(avctx, &tmp);
avctx->pkt = &tmp;
if (HAVE_THREADS && avctx->active_thread_type & FF_THREAD_FRAME)
ret = ff_thread_decode_frame(avctx, frame, got_frame_ptr, &tmp);
else {
ret = avctx->codec->decode(avctx, frame, got_frame_ptr, &tmp);
frame->pkt_dts = avpkt->dts;
}
if (ret >= 0 && *got_frame_ptr) {
add_metadata_from_side_data(avctx, frame);
avctx->frame_number++;
av_frame_set_best_effort_timestamp(frame,
guess_correct_pts(avctx,
frame->pkt_pts,
frame->pkt_dts));
if (frame->format == AV_SAMPLE_FMT_NONE)
frame->format = avctx->sample_fmt;
if (!frame->channel_layout)
frame->channel_layout = avctx->channel_layout;
if (!av_frame_get_channels(frame))
av_frame_set_channels(frame, avctx->channels);
if (!frame->sample_rate)
frame->sample_rate = avctx->sample_rate;
}
side= av_packet_get_side_data(avctx->pkt, AV_PKT_DATA_SKIP_SAMPLES, &side_size);
if(side && side_size>=10) {
avctx->internal->skip_samples = AV_RL32(side);
av_log(avctx, AV_LOG_DEBUG, "skip %d samples due to side data\n",
avctx->internal->skip_samples);
}
if (avctx->internal->skip_samples && *got_frame_ptr) {
if(frame->nb_samples <= avctx->internal->skip_samples){
*got_frame_ptr = 0;
avctx->internal->skip_samples -= frame->nb_samples;
av_log(avctx, AV_LOG_DEBUG, "skip whole frame, skip left: %d\n",
avctx->internal->skip_samples);
} else {
av_samples_copy(frame->extended_data, frame->extended_data, 0, avctx->internal->skip_samples,
frame->nb_samples - avctx->internal->skip_samples, avctx->channels, frame->format);
if(avctx->pkt_timebase.num && avctx->sample_rate) {
int64_t diff_ts = av_rescale_q(avctx->internal->skip_samples,
(AVRational){1, avctx->sample_rate},
avctx->pkt_timebase);
if(frame->pkt_pts!=AV_NOPTS_VALUE)
frame->pkt_pts += diff_ts;
if(frame->pkt_dts!=AV_NOPTS_VALUE)
frame->pkt_dts += diff_ts;
if (av_frame_get_pkt_duration(frame) >= diff_ts)
av_frame_set_pkt_duration(frame, av_frame_get_pkt_duration(frame) - diff_ts);
} else {
av_log(avctx, AV_LOG_WARNING, "Could not update timestamps for skipped samples.\n");
}
av_log(avctx, AV_LOG_DEBUG, "skip %d/%d samples\n",
avctx->internal->skip_samples, frame->nb_samples);
frame->nb_samples -= avctx->internal->skip_samples;
avctx->internal->skip_samples = 0;
}
}
avctx->pkt = NULL;
if (did_split) {
ff_packet_free_side_data(&tmp);
if(ret == tmp.size)
ret = avpkt->size;
}
if (ret >= 0 && *got_frame_ptr) {
if (!avctx->refcounted_frames) {
avci->to_free = *frame;
avci->to_free.extended_data = avci->to_free.data;
memset(frame->buf, 0, sizeof(frame->buf));
frame->extended_buf = NULL;
frame->nb_extended_buf = 0;
}
} else if (frame->data[0])
av_frame_unref(frame);
}
/* many decoders assign whole AVFrames, thus overwriting extended_data;
* make sure it's set correctly; assume decoders that actually use
* extended_data are doing it correctly */
if (*got_frame_ptr) {
planar = av_sample_fmt_is_planar(frame->format);
channels = av_frame_get_channels(frame);
if (!(planar && channels > AV_NUM_DATA_POINTERS))
frame->extended_data = frame->data;
} else {
frame->extended_data = NULL;
}
return ret;
} | 4,693 |
qemu | 14a10fc39923b3af07c8c46d22cb20843bee3a72 | 1 | static void s390_cpu_realizefn(DeviceState *dev, Error **errp)
{
S390CPU *cpu = S390_CPU(dev);
S390CPUClass *scc = S390_CPU_GET_CLASS(dev);
cpu_reset(CPU(cpu));
scc->parent_realize(dev, errp);
}
| 4,694 |
FFmpeg | 9e6a2427558a718be0c1fffacffd935f630a7a8d | 1 | static void vp56_decode_mb(VP56Context *s, int row, int col, int is_alpha)
{
AVFrame *frame_current, *frame_ref;
VP56mb mb_type;
VP56Frame ref_frame;
int b, ab, b_max, plane, off;
if (s->frames[VP56_FRAME_CURRENT]->key_frame)
mb_type = VP56_MB_INTRA;
else
mb_type = vp56_decode_mv(s, row, col);
ref_frame = ff_vp56_reference_frame[mb_type];
s->parse_coeff(s);
vp56_add_predictors_dc(s, ref_frame);
frame_current = s->frames[VP56_FRAME_CURRENT];
frame_ref = s->frames[ref_frame];
if (mb_type != VP56_MB_INTRA && !frame_ref->data[0])
return;
ab = 6*is_alpha;
b_max = 6 - 2*is_alpha;
switch (mb_type) {
case VP56_MB_INTRA:
for (b=0; b<b_max; b++) {
plane = ff_vp56_b2p[b+ab];
s->vp3dsp.idct_put(frame_current->data[plane] + s->block_offset[b],
s->stride[plane], s->block_coeff[b]);
}
break;
case VP56_MB_INTER_NOVEC_PF:
case VP56_MB_INTER_NOVEC_GF:
for (b=0; b<b_max; b++) {
plane = ff_vp56_b2p[b+ab];
off = s->block_offset[b];
s->hdsp.put_pixels_tab[1][0](frame_current->data[plane] + off,
frame_ref->data[plane] + off,
s->stride[plane], 8);
s->vp3dsp.idct_add(frame_current->data[plane] + off,
s->stride[plane], s->block_coeff[b]);
}
break;
case VP56_MB_INTER_DELTA_PF:
case VP56_MB_INTER_V1_PF:
case VP56_MB_INTER_V2_PF:
case VP56_MB_INTER_DELTA_GF:
case VP56_MB_INTER_4V:
case VP56_MB_INTER_V1_GF:
case VP56_MB_INTER_V2_GF:
for (b=0; b<b_max; b++) {
int x_off = b==1 || b==3 ? 8 : 0;
int y_off = b==2 || b==3 ? 8 : 0;
plane = ff_vp56_b2p[b+ab];
vp56_mc(s, b, plane, frame_ref->data[plane], s->stride[plane],
16*col+x_off, 16*row+y_off);
s->vp3dsp.idct_add(frame_current->data[plane] + s->block_offset[b],
s->stride[plane], s->block_coeff[b]);
}
break;
}
if (is_alpha) {
s->block_coeff[4][0] = 0;
s->block_coeff[5][0] = 0;
}
}
| 4,695 |
qemu | 129c7d1c536d0c67a8781cb09fb5bdb3d0f6a2d0 | 1 | static int qemu_rbd_create(const char *filename, QemuOpts *opts, Error **errp)
{
Error *local_err = NULL;
int64_t bytes = 0;
int64_t objsize;
int obj_order = 0;
const char *pool, *name, *conf, *clientname, *keypairs;
const char *secretid;
rados_t cluster;
rados_ioctx_t io_ctx;
QDict *options = NULL;
int ret = 0;
secretid = qemu_opt_get(opts, "password-secret");
/* Read out options */
bytes = ROUND_UP(qemu_opt_get_size_del(opts, BLOCK_OPT_SIZE, 0),
BDRV_SECTOR_SIZE);
objsize = qemu_opt_get_size_del(opts, BLOCK_OPT_CLUSTER_SIZE, 0);
if (objsize) {
if ((objsize - 1) & objsize) { /* not a power of 2? */
error_setg(errp, "obj size needs to be power of 2");
ret = -EINVAL;
goto exit;
}
if (objsize < 4096) {
error_setg(errp, "obj size too small");
ret = -EINVAL;
goto exit;
}
obj_order = ctz32(objsize);
}
options = qdict_new();
qemu_rbd_parse_filename(filename, options, &local_err);
if (local_err) {
ret = -EINVAL;
error_propagate(errp, local_err);
goto exit;
}
pool = qdict_get_try_str(options, "pool");
conf = qdict_get_try_str(options, "conf");
clientname = qdict_get_try_str(options, "user");
name = qdict_get_try_str(options, "image");
keypairs = qdict_get_try_str(options, "=keyvalue-pairs");
ret = rados_create(&cluster, clientname);
if (ret < 0) {
error_setg_errno(errp, -ret, "error initializing");
goto exit;
}
/* try default location when conf=NULL, but ignore failure */
ret = rados_conf_read_file(cluster, conf);
if (conf && ret < 0) {
error_setg_errno(errp, -ret, "error reading conf file %s", conf);
ret = -EIO;
goto shutdown;
}
ret = qemu_rbd_set_keypairs(cluster, keypairs, errp);
if (ret < 0) {
ret = -EIO;
goto shutdown;
}
if (qemu_rbd_set_auth(cluster, secretid, errp) < 0) {
ret = -EIO;
goto shutdown;
}
ret = rados_connect(cluster);
if (ret < 0) {
error_setg_errno(errp, -ret, "error connecting");
goto shutdown;
}
ret = rados_ioctx_create(cluster, pool, &io_ctx);
if (ret < 0) {
error_setg_errno(errp, -ret, "error opening pool %s", pool);
goto shutdown;
}
ret = rbd_create(io_ctx, name, bytes, &obj_order);
if (ret < 0) {
error_setg_errno(errp, -ret, "error rbd create");
}
rados_ioctx_destroy(io_ctx);
shutdown:
rados_shutdown(cluster);
exit:
QDECREF(options);
return ret;
} | 4,696 |
FFmpeg | 29fb49194bedc74ac9be0b49b6b42dcfeb6222d9 | 0 | void av_force_cpu_flags(int arg){
if ( (arg & ( AV_CPU_FLAG_3DNOW |
AV_CPU_FLAG_3DNOWEXT |
AV_CPU_FLAG_MMXEXT |
AV_CPU_FLAG_SSE |
AV_CPU_FLAG_SSE2 |
AV_CPU_FLAG_SSE2SLOW |
AV_CPU_FLAG_SSE3 |
AV_CPU_FLAG_SSE3SLOW |
AV_CPU_FLAG_SSSE3 |
AV_CPU_FLAG_SSE4 |
AV_CPU_FLAG_SSE42 |
AV_CPU_FLAG_AVX |
AV_CPU_FLAG_AVXSLOW |
AV_CPU_FLAG_XOP |
AV_CPU_FLAG_FMA3 |
AV_CPU_FLAG_FMA4 |
AV_CPU_FLAG_AVX2 ))
&& !(arg & AV_CPU_FLAG_MMX)) {
av_log(NULL, AV_LOG_WARNING, "MMX implied by specified flags\n");
arg |= AV_CPU_FLAG_MMX;
}
flags = arg;
checked = arg != -1;
}
| 4,699 |
FFmpeg | 43fab7aafc3efc3d88e23a1ba27b939be09b3bd3 | 0 | static int select_frame(AVFilterContext *ctx, AVFilterBufferRef *picref)
{
SelectContext *select = ctx->priv;
AVFilterLink *inlink = ctx->inputs[0];
double res;
if (isnan(select->var_values[VAR_START_PTS]))
select->var_values[VAR_START_PTS] = TS2D(picref->pts);
if (isnan(select->var_values[VAR_START_T]))
select->var_values[VAR_START_T] = TS2D(picref->pts) * av_q2d(inlink->time_base);
select->var_values[VAR_PTS] = TS2D(picref->pts);
select->var_values[VAR_T ] = picref->pts * av_q2d(inlink->time_base);
select->var_values[VAR_POS] = picref->pos == -1 ? NAN : picref->pos;
select->var_values[VAR_PREV_PTS] = TS2D(picref ->pts);
select->var_values[VAR_INTERLACE_TYPE] =
!picref->video->interlaced ? INTERLACE_TYPE_P :
picref->video->top_field_first ? INTERLACE_TYPE_T : INTERLACE_TYPE_B;
select->var_values[VAR_PICT_TYPE] = picref->video->pict_type;
res = av_expr_eval(select->expr, select->var_values, NULL);
av_log(inlink->dst, AV_LOG_DEBUG,
"n:%d pts:%d t:%f pos:%d interlace_type:%c key:%d pict_type:%c "
"-> select:%f\n",
(int)select->var_values[VAR_N],
(int)select->var_values[VAR_PTS],
select->var_values[VAR_T],
(int)select->var_values[VAR_POS],
select->var_values[VAR_INTERLACE_TYPE] == INTERLACE_TYPE_P ? 'P' :
select->var_values[VAR_INTERLACE_TYPE] == INTERLACE_TYPE_T ? 'T' :
select->var_values[VAR_INTERLACE_TYPE] == INTERLACE_TYPE_B ? 'B' : '?',
(int)select->var_values[VAR_KEY],
av_get_picture_type_char(select->var_values[VAR_PICT_TYPE]),
res);
select->var_values[VAR_N] += 1.0;
if (res) {
select->var_values[VAR_PREV_SELECTED_N] = select->var_values[VAR_N];
select->var_values[VAR_PREV_SELECTED_PTS] = select->var_values[VAR_PTS];
select->var_values[VAR_PREV_SELECTED_T] = select->var_values[VAR_T];
select->var_values[VAR_SELECTED_N] += 1.0;
}
return res;
}
| 4,700 |
FFmpeg | ed2112fb36d7407d960b4f44475a700a7c44344c | 0 | static int mov_write_identification(AVIOContext *pb, AVFormatContext *s)
{
MOVMuxContext *mov = s->priv_data;
int i;
mov_write_ftyp_tag(pb,s);
if (mov->mode == MODE_PSP) {
int video_streams_nb = 0, audio_streams_nb = 0, other_streams_nb = 0;
for (i = 0; i < s->nb_streams; i++) {
AVStream *st = s->streams[i];
if (st->codecpar->codec_type == AVMEDIA_TYPE_VIDEO)
video_streams_nb++;
else if (st->codecpar->codec_type == AVMEDIA_TYPE_AUDIO)
audio_streams_nb++;
else
other_streams_nb++;
}
if (video_streams_nb != 1 || audio_streams_nb != 1 || other_streams_nb) {
av_log(s, AV_LOG_ERROR, "PSP mode need one video and one audio stream\n");
return AVERROR(EINVAL);
}
mov_write_uuidprof_tag(pb, s);
}
return 0;
}
| 4,702 |
qemu | e68c35cfb8088a11300371751e3987f67cac15b1 | 0 | static QIOChannel *nbd_negotiate_handle_starttls(NBDClient *client,
uint32_t length,
Error **errp)
{
QIOChannel *ioc;
QIOChannelTLS *tioc;
struct NBDTLSHandshakeData data = { 0 };
trace_nbd_negotiate_handle_starttls();
ioc = client->ioc;
if (length) {
if (nbd_drop(ioc, length, errp) < 0) {
return NULL;
}
nbd_negotiate_send_rep_err(ioc, NBD_REP_ERR_INVALID, NBD_OPT_STARTTLS,
errp,
"OPT_STARTTLS should not have length");
return NULL;
}
if (nbd_negotiate_send_rep(client->ioc, NBD_REP_ACK,
NBD_OPT_STARTTLS, errp) < 0) {
return NULL;
}
tioc = qio_channel_tls_new_server(ioc,
client->tlscreds,
client->tlsaclname,
errp);
if (!tioc) {
return NULL;
}
qio_channel_set_name(QIO_CHANNEL(tioc), "nbd-server-tls");
trace_nbd_negotiate_handle_starttls_handshake();
data.loop = g_main_loop_new(g_main_context_default(), FALSE);
qio_channel_tls_handshake(tioc,
nbd_tls_handshake,
&data,
NULL);
if (!data.complete) {
g_main_loop_run(data.loop);
}
g_main_loop_unref(data.loop);
if (data.error) {
object_unref(OBJECT(tioc));
error_propagate(errp, data.error);
return NULL;
}
return QIO_CHANNEL(tioc);
}
| 4,703 |
qemu | 5fb6c7a8b26eab1a22207d24b4784bd2b39ab54b | 0 | static void vnc_write_u32(VncState *vs, uint32_t value)
{
uint8_t buf[4];
buf[0] = (value >> 24) & 0xFF;
buf[1] = (value >> 16) & 0xFF;
buf[2] = (value >> 8) & 0xFF;
buf[3] = value & 0xFF;
vnc_write(vs, buf, 4);
}
| 4,704 |
qemu | 8b42704441865611a5ee241ac9fc5cabc47a079b | 0 | static int tcg_cpu_exec(CPUState *cpu)
{
int ret;
#ifdef CONFIG_PROFILER
int64_t ti;
#endif
#ifdef CONFIG_PROFILER
ti = profile_getclock();
#endif
if (use_icount) {
int64_t count;
int64_t deadline;
int decr;
timers_state.qemu_icount -= (cpu->icount_decr.u16.low
+ cpu->icount_extra);
cpu->icount_decr.u16.low = 0;
cpu->icount_extra = 0;
deadline = qemu_clock_deadline_ns_all(QEMU_CLOCK_VIRTUAL);
/* Maintain prior (possibly buggy) behaviour where if no deadline
* was set (as there is no QEMU_CLOCK_VIRTUAL timer) or it is more than
* INT32_MAX nanoseconds ahead, we still use INT32_MAX
* nanoseconds.
*/
if ((deadline < 0) || (deadline > INT32_MAX)) {
deadline = INT32_MAX;
}
count = qemu_icount_round(deadline);
timers_state.qemu_icount += count;
decr = (count > 0xffff) ? 0xffff : count;
count -= decr;
cpu->icount_decr.u16.low = decr;
cpu->icount_extra = count;
}
ret = cpu_exec(cpu);
#ifdef CONFIG_PROFILER
tcg_time += profile_getclock() - ti;
#endif
if (use_icount) {
/* Fold pending instructions back into the
instruction counter, and clear the interrupt flag. */
timers_state.qemu_icount -= (cpu->icount_decr.u16.low
+ cpu->icount_extra);
cpu->icount_decr.u32 = 0;
cpu->icount_extra = 0;
}
return ret;
}
| 4,705 |
qemu | 1687a089f103f9b7a1b4a1555068054cb46ee9e9 | 0 | vcard_emul_login(VCard *card, unsigned char *pin, int pin_len)
{
PK11SlotInfo *slot;
unsigned char *pin_string = NULL;
int i;
SECStatus rv;
if (!nss_emul_init) {
return VCARD7816_STATUS_ERROR_CONDITION_NOT_SATISFIED;
}
slot = vcard_emul_card_get_slot(card);
/* We depend on the PKCS #11 module internal login state here because we
* create a separate process to handle each guest instance. If we needed
* to handle multiple guests from one process, then we would need to keep
* a lot of extra state in our card structure
* */
pin_string = g_malloc(pin_len+1);
memcpy(pin_string, pin, pin_len);
pin_string[pin_len] = 0;
/* handle CAC expanded pins correctly */
for (i = pin_len-1; i >= 0 && (pin_string[i] == 0xff); i--) {
pin_string[i] = 0;
}
rv = PK11_Authenticate(slot, PR_FALSE, pin_string);
memset(pin_string, 0, pin_len); /* don't let the pin hang around in memory
to be snooped */
g_free(pin_string);
if (rv == SECSuccess) {
return VCARD7816_STATUS_SUCCESS;
}
/* map the error from port get error */
return VCARD7816_STATUS_ERROR_CONDITION_NOT_SATISFIED;
}
| 4,706 |
FFmpeg | 82dd7d0dec29ee59af91ce18c29eb151b363ff37 | 0 | static int motion_inter_block (bit_buffer_t *bitbuf,
uint8_t *current, uint8_t *previous, int pitch,
svq1_pmv_t *motion, int x, int y) {
uint8_t *src;
uint8_t *dst;
svq1_pmv_t mv;
svq1_pmv_t *pmv[3];
int result;
/* predict and decode motion vector */
pmv[0] = &motion[0];
pmv[1] = &motion[(x / 8) + 2];
pmv[2] = &motion[(x / 8) + 4];
if (y == 0) {
pmv[1] = pmv[0];
pmv[2] = pmv[0];
}
result = decode_motion_vector (bitbuf, &mv, pmv);
if (result != 0)
return result;
motion[0].x = mv.x;
motion[0].y = mv.y;
motion[(x / 8) + 2].x = mv.x;
motion[(x / 8) + 2].y = mv.y;
motion[(x / 8) + 3].x = mv.x;
motion[(x / 8) + 3].y = mv.y;
src = &previous[(x + (mv.x >> 1)) + (y + (mv.y >> 1))*pitch];
dst = current;
put_pixels_tab[((mv.y & 1) << 1) | (mv.x & 1)](dst,src,pitch,16);
put_pixels_tab[((mv.y & 1) << 1) | (mv.x & 1)](dst+8,src+8,pitch,16);
return 0;
}
| 4,707 |
qemu | 1687a089f103f9b7a1b4a1555068054cb46ee9e9 | 0 | vreader_list_delete(VReaderList *list)
{
VReaderListEntry *current_entry;
VReaderListEntry *next_entry = NULL;
for (current_entry = vreader_list_get_first(list); current_entry;
current_entry = next_entry) {
next_entry = vreader_list_get_next(current_entry);
vreader_list_entry_delete(current_entry);
}
list->head = NULL;
list->tail = NULL;
g_free(list);
}
| 4,709 |
qemu | a8170e5e97ad17ca169c64ba87ae2f53850dab4c | 0 | static uint32_t cadence_ttc_read_imp(void *opaque, target_phys_addr_t offset)
{
CadenceTimerState *s = cadence_timer_from_addr(opaque, offset);
uint32_t value;
cadence_timer_sync(s);
cadence_timer_run(s);
switch (offset) {
case 0x00: /* clock control */
case 0x04:
case 0x08:
return s->reg_clock;
case 0x0c: /* counter control */
case 0x10:
case 0x14:
return s->reg_count;
case 0x18: /* counter value */
case 0x1c:
case 0x20:
return (uint16_t)(s->reg_value >> 16);
case 0x24: /* reg_interval counter */
case 0x28:
case 0x2c:
return s->reg_interval;
case 0x30: /* match 1 counter */
case 0x34:
case 0x38:
return s->reg_match[0];
case 0x3c: /* match 2 counter */
case 0x40:
case 0x44:
return s->reg_match[1];
case 0x48: /* match 3 counter */
case 0x4c:
case 0x50:
return s->reg_match[2];
case 0x54: /* interrupt register */
case 0x58:
case 0x5c:
/* cleared after read */
value = s->reg_intr;
s->reg_intr = 0;
cadence_timer_update(s);
return value;
case 0x60: /* interrupt enable */
case 0x64:
case 0x68:
return s->reg_intr_en;
case 0x6c:
case 0x70:
case 0x74:
return s->reg_event_ctrl;
case 0x78:
case 0x7c:
case 0x80:
return s->reg_event;
default:
return 0;
}
}
| 4,710 |
qemu | 61007b316cd71ee7333ff7a0a749a8949527575f | 0 | static int coroutine_fn bdrv_co_do_writev(BlockDriverState *bs,
int64_t sector_num, int nb_sectors, QEMUIOVector *qiov,
BdrvRequestFlags flags)
{
if (nb_sectors < 0 || nb_sectors > BDRV_REQUEST_MAX_SECTORS) {
return -EINVAL;
}
return bdrv_co_do_pwritev(bs, sector_num << BDRV_SECTOR_BITS,
nb_sectors << BDRV_SECTOR_BITS, qiov, flags);
}
| 4,711 |
qemu | dfd100f242370886bb6732f70f1f7cbd8eb9fedc | 0 | static int vnc_display_connect(VncDisplay *vd,
SocketAddress **saddr,
size_t nsaddr,
SocketAddress **wsaddr,
size_t nwsaddr,
Error **errp)
{
/* connect to viewer */
QIOChannelSocket *sioc = NULL;
if (nwsaddr != 0) {
error_setg(errp, "Cannot use websockets in reverse mode");
return -1;
}
if (nsaddr != 1) {
error_setg(errp, "Expected a single address in reverse mode");
return -1;
}
/* TODO SOCKET_ADDRESS_KIND_FD when fd has AF_UNIX */
vd->is_unix = saddr[0]->type == SOCKET_ADDRESS_KIND_UNIX;
sioc = qio_channel_socket_new();
qio_channel_set_name(QIO_CHANNEL(sioc), "vnc-reverse");
if (qio_channel_socket_connect_sync(sioc, saddr[0], errp) < 0) {
return -1;
}
vnc_connect(vd, sioc, false, false);
object_unref(OBJECT(sioc));
return 0;
}
| 4,712 |
qemu | e37dac06dc4e85a2f46c24261c0dfdf2a30b50e3 | 0 | static int vfio_add_ext_cap(VFIOPCIDevice *vdev)
{
PCIDevice *pdev = &vdev->pdev;
uint32_t header;
uint16_t cap_id, next, size;
uint8_t cap_ver;
uint8_t *config;
/*
* pcie_add_capability always inserts the new capability at the tail
* of the chain. Therefore to end up with a chain that matches the
* physical device, we cache the config space to avoid overwriting
* the original config space when we parse the extended capabilities.
*/
config = g_memdup(pdev->config, vdev->config_size);
for (next = PCI_CONFIG_SPACE_SIZE; next;
next = PCI_EXT_CAP_NEXT(pci_get_long(config + next))) {
header = pci_get_long(config + next);
cap_id = PCI_EXT_CAP_ID(header);
cap_ver = PCI_EXT_CAP_VER(header);
/*
* If it becomes important to configure extended capabilities to their
* actual size, use this as the default when it's something we don't
* recognize. Since QEMU doesn't actually handle many of the config
* accesses, exact size doesn't seem worthwhile.
*/
size = vfio_ext_cap_max_size(config, next);
pcie_add_capability(pdev, cap_id, cap_ver, next, size);
pci_set_long(pdev->config + next, PCI_EXT_CAP(cap_id, cap_ver, 0));
/* Use emulated next pointer to allow dropping extended caps */
pci_long_test_and_set_mask(vdev->emulated_config_bits + next,
PCI_EXT_CAP_NEXT_MASK);
}
g_free(config);
return 0;
}
| 4,714 |
qemu | ef1e1e0782e99c9dcf2b35e5310cdd8ca9211374 | 0 | void desc_ring_free(DescRing *ring)
{
if (ring->info) {
g_free(ring->info);
}
g_free(ring);
}
| 4,715 |
qemu | f8b0aa25599782eef91edc00ebf620bd14db720c | 0 | setup_return(CPUState *env, struct emulated_sigaction *ka,
abi_ulong *rc, void *frame, int usig)
{
abi_ulong handler = (abi_ulong)ka->sa._sa_handler;
abi_ulong retcode;
int thumb = 0;
#if defined(TARGET_CONFIG_CPU_32)
#if 0
abi_ulong cpsr = env->cpsr;
/*
* Maybe we need to deliver a 32-bit signal to a 26-bit task.
*/
if (ka->sa.sa_flags & SA_THIRTYTWO)
cpsr = (cpsr & ~MODE_MASK) | USR_MODE;
#ifdef CONFIG_ARM_THUMB
if (elf_hwcap & HWCAP_THUMB) {
/*
* The LSB of the handler determines if we're going to
* be using THUMB or ARM mode for this signal handler.
*/
thumb = handler & 1;
if (thumb)
cpsr |= T_BIT;
else
cpsr &= ~T_BIT;
}
#endif /* CONFIG_ARM_THUMB */
#endif /* 0 */
#endif /* TARGET_CONFIG_CPU_32 */
if (ka->sa.sa_flags & TARGET_SA_RESTORER) {
retcode = (abi_ulong)ka->sa.sa_restorer;
} else {
unsigned int idx = thumb;
if (ka->sa.sa_flags & TARGET_SA_SIGINFO)
idx += 2;
if (__put_user(retcodes[idx], rc))
return 1;
#if 0
flush_icache_range((abi_ulong)rc,
(abi_ulong)(rc + 1));
#endif
retcode = ((abi_ulong)rc) + thumb;
}
env->regs[0] = usig;
env->regs[13] = h2g(frame);
env->regs[14] = retcode;
env->regs[15] = handler & (thumb ? ~1 : ~3);
#if 0
#ifdef TARGET_CONFIG_CPU_32
env->cpsr = cpsr;
#endif
#endif
return 0;
}
| 4,716 |
qemu | a6baa60807f88ba7d97b1787797fb58882ccbfb9 | 0 | BlockStatsList *qmp_query_blockstats(bool has_query_nodes,
bool query_nodes,
Error **errp)
{
BlockStatsList *head = NULL, **p_next = &head;
BlockBackend *blk = NULL;
BlockDriverState *bs = NULL;
/* Just to be safe if query_nodes is not always initialized */
query_nodes = has_query_nodes && query_nodes;
while (next_query_bds(&blk, &bs, query_nodes)) {
BlockStatsList *info = g_malloc0(sizeof(*info));
AioContext *ctx = blk ? blk_get_aio_context(blk)
: bdrv_get_aio_context(bs);
aio_context_acquire(ctx);
info->value = bdrv_query_stats(blk, bs, !query_nodes);
aio_context_release(ctx);
*p_next = info;
p_next = &info->next;
}
return head;
}
| 4,717 |
qemu | a8170e5e97ad17ca169c64ba87ae2f53850dab4c | 0 | static void dp8393x_writel(void *opaque, target_phys_addr_t addr, uint32_t val)
{
dp8393x_writew(opaque, addr, val & 0xffff);
dp8393x_writew(opaque, addr + 2, (val >> 16) & 0xffff);
}
| 4,719 |
qemu | 72cf2d4f0e181d0d3a3122e04129c58a95da713e | 0 | void usb_register_port(USBBus *bus, USBPort *port, void *opaque, int index,
usb_attachfn attach)
{
port->opaque = opaque;
port->index = index;
port->attach = attach;
TAILQ_INSERT_TAIL(&bus->free, port, next);
bus->nfree++;
}
| 4,720 |
qemu | cd723b85601baa7a0eeffbac83421357a70d81ee | 0 | static void scsi_read_data(SCSIRequest *req)
{
SCSIDiskReq *r = DO_UPCAST(SCSIDiskReq, req, req);
SCSIDiskState *s = DO_UPCAST(SCSIDiskState, qdev, r->req.dev);
bool first;
DPRINTF("Read sector_count=%d\n", r->sector_count);
if (r->sector_count == 0) {
/* This also clears the sense buffer for REQUEST SENSE. */
scsi_req_complete(&r->req, GOOD);
return;
}
/* No data transfer may already be in progress */
assert(r->req.aiocb == NULL);
/* The request is used as the AIO opaque value, so add a ref. */
scsi_req_ref(&r->req);
if (r->req.cmd.mode == SCSI_XFER_TO_DEV) {
DPRINTF("Data transfer direction invalid\n");
scsi_read_complete(r, -EINVAL);
return;
}
if (s->tray_open) {
scsi_read_complete(r, -ENOMEDIUM);
return;
}
first = !r->started;
r->started = true;
if (first && r->need_fua_emulation) {
block_acct_start(blk_get_stats(s->qdev.conf.blk), &r->acct, 0,
BLOCK_ACCT_FLUSH);
r->req.aiocb = blk_aio_flush(s->qdev.conf.blk, scsi_do_read_cb, r);
} else {
scsi_do_read(r, 0);
}
}
| 4,721 |
qemu | 4417ab7adf1613799054be5afedf810fc2524ee8 | 0 | void qmp_cont(Error **errp)
{
Error *local_err = NULL;
BlockBackend *blk;
BlockDriverState *bs;
BdrvNextIterator it;
/* if there is a dump in background, we should wait until the dump
* finished */
if (dump_in_progress()) {
error_setg(errp, "There is a dump in process, please wait.");
return;
}
if (runstate_needs_reset()) {
error_setg(errp, "Resetting the Virtual Machine is required");
return;
} else if (runstate_check(RUN_STATE_SUSPENDED)) {
return;
}
for (blk = blk_next(NULL); blk; blk = blk_next(blk)) {
blk_iostatus_reset(blk);
}
for (bs = bdrv_first(&it); bs; bs = bdrv_next(&it)) {
bdrv_add_key(bs, NULL, &local_err);
if (local_err) {
error_propagate(errp, local_err);
return;
}
}
/* Continuing after completed migration. Images have been inactivated to
* allow the destination to take control. Need to get control back now.
*
* If there are no inactive block nodes (e.g. because the VM was just
* paused rather than completing a migration), bdrv_inactivate_all() simply
* doesn't do anything. */
bdrv_invalidate_cache_all(&local_err);
if (local_err) {
error_propagate(errp, local_err);
return;
}
blk_resume_after_migration(&local_err);
if (local_err) {
error_propagate(errp, local_err);
return;
}
if (runstate_check(RUN_STATE_INMIGRATE)) {
autostart = 1;
} else {
vm_start();
}
}
| 4,722 |
qemu | 9bc3a3a216e2689bfcdd36c3e079333bbdbf3ba0 | 0 | static void ehci_queues_rip_unused(EHCIState *ehci, int async, int flush)
{
EHCIQueueHead *head = async ? &ehci->aqueues : &ehci->pqueues;
uint64_t maxage = FRAME_TIMER_NS * ehci->maxframes * 4;
EHCIQueue *q, *tmp;
QTAILQ_FOREACH_SAFE(q, head, next, tmp) {
if (q->seen) {
q->seen = 0;
q->ts = ehci->last_run_ns;
continue;
}
if (!flush && ehci->last_run_ns < q->ts + maxage) {
continue;
}
ehci_free_queue(q);
}
}
| 4,724 |
qemu | a89f364ae8740dfc31b321eed9ee454e996dc3c1 | 0 | static uint32_t mipid_txrx(void *opaque, uint32_t cmd, int len)
{
struct mipid_s *s = (struct mipid_s *) opaque;
uint8_t ret;
if (len > 9) {
hw_error("%s: FIXME: bad SPI word width %i\n", __FUNCTION__, len);
}
if (s->p >= ARRAY_SIZE(s->resp)) {
ret = 0;
} else {
ret = s->resp[s->p++];
}
if (s->pm-- > 0) {
s->param[s->pm] = cmd;
} else {
s->cmd = cmd;
}
switch (s->cmd) {
case 0x00: /* NOP */
break;
case 0x01: /* SWRESET */
mipid_reset(s);
break;
case 0x02: /* BSTROFF */
s->booster = 0;
break;
case 0x03: /* BSTRON */
s->booster = 1;
break;
case 0x04: /* RDDID */
s->p = 0;
s->resp[0] = (s->id >> 16) & 0xff;
s->resp[1] = (s->id >> 8) & 0xff;
s->resp[2] = (s->id >> 0) & 0xff;
break;
case 0x06: /* RD_RED */
case 0x07: /* RD_GREEN */
/* XXX the bootloader sometimes issues RD_BLUE meaning RDDID so
* for the bootloader one needs to change this. */
case 0x08: /* RD_BLUE */
s->p = 0;
/* TODO: return first pixel components */
s->resp[0] = 0x01;
break;
case 0x09: /* RDDST */
s->p = 0;
s->resp[0] = s->booster << 7;
s->resp[1] = (5 << 4) | (s->partial << 2) |
(s->sleep << 1) | s->normal;
s->resp[2] = (s->vscr << 7) | (s->invert << 5) |
(s->onoff << 2) | (s->te << 1) | (s->gamma >> 2);
s->resp[3] = s->gamma << 6;
break;
case 0x0a: /* RDDPM */
s->p = 0;
s->resp[0] = (s->onoff << 2) | (s->normal << 3) | (s->sleep << 4) |
(s->partial << 5) | (s->sleep << 6) | (s->booster << 7);
break;
case 0x0b: /* RDDMADCTR */
s->p = 0;
s->resp[0] = 0;
break;
case 0x0c: /* RDDCOLMOD */
s->p = 0;
s->resp[0] = 5; /* 65K colours */
break;
case 0x0d: /* RDDIM */
s->p = 0;
s->resp[0] = (s->invert << 5) | (s->vscr << 7) | s->gamma;
break;
case 0x0e: /* RDDSM */
s->p = 0;
s->resp[0] = s->te << 7;
break;
case 0x0f: /* RDDSDR */
s->p = 0;
s->resp[0] = s->selfcheck;
break;
case 0x10: /* SLPIN */
s->sleep = 1;
break;
case 0x11: /* SLPOUT */
s->sleep = 0;
s->selfcheck ^= 1 << 6; /* POFF self-diagnosis Ok */
break;
case 0x12: /* PTLON */
s->partial = 1;
s->normal = 0;
s->vscr = 0;
break;
case 0x13: /* NORON */
s->partial = 0;
s->normal = 1;
s->vscr = 0;
break;
case 0x20: /* INVOFF */
s->invert = 0;
break;
case 0x21: /* INVON */
s->invert = 1;
break;
case 0x22: /* APOFF */
case 0x23: /* APON */
goto bad_cmd;
case 0x25: /* WRCNTR */
if (s->pm < 0) {
s->pm = 1;
}
goto bad_cmd;
case 0x26: /* GAMSET */
if (!s->pm) {
s->gamma = ctz32(s->param[0] & 0xf);
if (s->gamma == 32) {
s->gamma = -1; /* XXX: should this be 0? */
}
} else if (s->pm < 0) {
s->pm = 1;
}
break;
case 0x28: /* DISPOFF */
s->onoff = 0;
break;
case 0x29: /* DISPON */
s->onoff = 1;
break;
case 0x2a: /* CASET */
case 0x2b: /* RASET */
case 0x2c: /* RAMWR */
case 0x2d: /* RGBSET */
case 0x2e: /* RAMRD */
case 0x30: /* PTLAR */
case 0x33: /* SCRLAR */
goto bad_cmd;
case 0x34: /* TEOFF */
s->te = 0;
break;
case 0x35: /* TEON */
if (!s->pm) {
s->te = 1;
} else if (s->pm < 0) {
s->pm = 1;
}
break;
case 0x36: /* MADCTR */
goto bad_cmd;
case 0x37: /* VSCSAD */
s->partial = 0;
s->normal = 0;
s->vscr = 1;
break;
case 0x38: /* IDMOFF */
case 0x39: /* IDMON */
case 0x3a: /* COLMOD */
goto bad_cmd;
case 0xb0: /* CLKINT / DISCTL */
case 0xb1: /* CLKEXT */
if (s->pm < 0) {
s->pm = 2;
}
break;
case 0xb4: /* FRMSEL */
break;
case 0xb5: /* FRM8SEL */
case 0xb6: /* TMPRNG / INIESC */
case 0xb7: /* TMPHIS / NOP2 */
case 0xb8: /* TMPREAD / MADCTL */
case 0xba: /* DISTCTR */
case 0xbb: /* EPVOL */
goto bad_cmd;
case 0xbd: /* Unknown */
s->p = 0;
s->resp[0] = 0;
s->resp[1] = 1;
break;
case 0xc2: /* IFMOD */
if (s->pm < 0) {
s->pm = 2;
}
break;
case 0xc6: /* PWRCTL */
case 0xc7: /* PPWRCTL */
case 0xd0: /* EPWROUT */
case 0xd1: /* EPWRIN */
case 0xd4: /* RDEV */
case 0xd5: /* RDRR */
goto bad_cmd;
case 0xda: /* RDID1 */
s->p = 0;
s->resp[0] = (s->id >> 16) & 0xff;
break;
case 0xdb: /* RDID2 */
s->p = 0;
s->resp[0] = (s->id >> 8) & 0xff;
break;
case 0xdc: /* RDID3 */
s->p = 0;
s->resp[0] = (s->id >> 0) & 0xff;
break;
default:
bad_cmd:
qemu_log_mask(LOG_GUEST_ERROR,
"%s: unknown command %02x\n", __func__, s->cmd);
break;
}
return ret;
}
| 4,725 |
qemu | a9f20d31a8332ea4d6a0c90b9731f11a51cff6db | 0 | static void set_encodings(VncState *vs, int32_t *encodings, size_t n_encodings)
{
int i;
unsigned int enc = 0;
vnc_zlib_init(vs);
vs->features = 0;
vs->vnc_encoding = -1;
vs->tight_compression = 9;
vs->tight_quality = 9;
vs->absolute = -1;
for (i = n_encodings - 1; i >= 0; i--) {
enc = encodings[i];
switch (enc) {
case VNC_ENCODING_RAW:
if (vs->vnc_encoding != -1) {
vs->vnc_encoding = enc;
}
break;
case VNC_ENCODING_COPYRECT:
vs->features |= VNC_FEATURE_COPYRECT_MASK;
break;
case VNC_ENCODING_HEXTILE:
vs->features |= VNC_FEATURE_HEXTILE_MASK;
if (vs->vnc_encoding != -1) {
vs->vnc_encoding = enc;
}
break;
case VNC_ENCODING_ZLIB:
vs->features |= VNC_FEATURE_ZLIB_MASK;
if (vs->vnc_encoding != -1) {
vs->vnc_encoding = enc;
}
break;
case VNC_ENCODING_DESKTOPRESIZE:
vs->features |= VNC_FEATURE_RESIZE_MASK;
break;
case VNC_ENCODING_POINTER_TYPE_CHANGE:
vs->features |= VNC_FEATURE_POINTER_TYPE_CHANGE_MASK;
break;
case VNC_ENCODING_RICH_CURSOR:
vs->features |= VNC_FEATURE_RICH_CURSOR_MASK;
break;
case VNC_ENCODING_EXT_KEY_EVENT:
send_ext_key_event_ack(vs);
break;
case VNC_ENCODING_AUDIO:
send_ext_audio_ack(vs);
break;
case VNC_ENCODING_WMVi:
vs->features |= VNC_FEATURE_WMVI_MASK;
break;
case VNC_ENCODING_COMPRESSLEVEL0 ... VNC_ENCODING_COMPRESSLEVEL0 + 9:
vs->tight_compression = (enc & 0x0F);
break;
case VNC_ENCODING_QUALITYLEVEL0 ... VNC_ENCODING_QUALITYLEVEL0 + 9:
vs->tight_quality = (enc & 0x0F);
break;
default:
VNC_DEBUG("Unknown encoding: %d (0x%.8x): %d\n", i, enc, enc);
break;
}
}
check_pointer_type_change(&vs->mouse_mode_notifier);
}
| 4,726 |
qemu | a8a358bf35e660b1c0bf5adc5446836c6c0d1c73 | 0 | static int cpu_x86_find_by_name(x86_def_t *x86_cpu_def, const char *cpu_model)
{
unsigned int i;
x86_def_t *def;
char *s = strdup(cpu_model);
char *featurestr, *name = strtok(s, ",");
uint32_t plus_features = 0, plus_ext_features = 0, plus_ext2_features = 0, plus_ext3_features = 0;
uint32_t minus_features = 0, minus_ext_features = 0, minus_ext2_features = 0, minus_ext3_features = 0;
int family = -1, model = -1, stepping = -1;
def = NULL;
for (i = 0; i < ARRAY_SIZE(x86_defs); i++) {
if (strcmp(name, x86_defs[i].name) == 0) {
def = &x86_defs[i];
break;
}
}
if (kvm_enabled() && strcmp(name, "host") == 0) {
cpu_x86_fill_host(x86_cpu_def);
} else if (!def) {
goto error;
} else {
memcpy(x86_cpu_def, def, sizeof(*def));
}
add_flagname_to_bitmaps("hypervisor", &plus_features,
&plus_ext_features, &plus_ext2_features, &plus_ext3_features);
featurestr = strtok(NULL, ",");
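/* remaining comma-separated tokens are +feature / -feature flags or key=value settings (family, model, stepping, vendor, model_id) */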
while (featurestr) {
char *val;
if (featurestr[0] == '+') {
add_flagname_to_bitmaps(featurestr + 1, &plus_features, &plus_ext_features, &plus_ext2_features, &plus_ext3_features);
} else if (featurestr[0] == '-') {
add_flagname_to_bitmaps(featurestr + 1, &minus_features, &minus_ext_features, &minus_ext2_features, &minus_ext3_features);
} else if ((val = strchr(featurestr, '='))) {
*val = 0; val++;
if (!strcmp(featurestr, "family")) {
char *err;
family = strtol(val, &err, 10);
if (!*val || *err || family < 0) {
fprintf(stderr, "bad numerical value %s\n", val);
goto error;
}
x86_cpu_def->family = family;
} else if (!strcmp(featurestr, "model")) {
char *err;
model = strtol(val, &err, 10);
if (!*val || *err || model < 0 || model > 0xff) {
fprintf(stderr, "bad numerical value %s\n", val);
goto error;
}
x86_cpu_def->model = model;
} else if (!strcmp(featurestr, "stepping")) {
char *err;
stepping = strtol(val, &err, 10);
if (!*val || *err || stepping < 0 || stepping > 0xf) {
fprintf(stderr, "bad numerical value %s\n", val);
goto error;
}
x86_cpu_def->stepping = stepping;
} else if (!strcmp(featurestr, "vendor")) {
if (strlen(val) != 12) {
fprintf(stderr, "vendor string must be 12 chars long\n");
goto error;
}
x86_cpu_def->vendor1 = 0;
x86_cpu_def->vendor2 = 0;
x86_cpu_def->vendor3 = 0;
for(i = 0; i < 4; i++) {
x86_cpu_def->vendor1 |= ((uint8_t)val[i ]) << (8 * i);
x86_cpu_def->vendor2 |= ((uint8_t)val[i + 4]) << (8 * i);
x86_cpu_def->vendor3 |= ((uint8_t)val[i + 8]) << (8 * i);
}
x86_cpu_def->vendor_override = 1;
} else if (!strcmp(featurestr, "model_id")) {
pstrcpy(x86_cpu_def->model_id, sizeof(x86_cpu_def->model_id),
val);
} else {
fprintf(stderr, "unrecognized feature %s\n", featurestr);
goto error;
}
} else {
fprintf(stderr, "feature string `%s' not in format (+feature|-feature|feature=xyz)\n", featurestr);
goto error;
}
featurestr = strtok(NULL, ",");
}
x86_cpu_def->features |= plus_features;
x86_cpu_def->ext_features |= plus_ext_features;
x86_cpu_def->ext2_features |= plus_ext2_features;
x86_cpu_def->ext3_features |= plus_ext3_features;
x86_cpu_def->features &= ~minus_features;
x86_cpu_def->ext_features &= ~minus_ext_features;
x86_cpu_def->ext2_features &= ~minus_ext2_features;
x86_cpu_def->ext3_features &= ~minus_ext3_features;
free(s);
return 0;
error:
free(s);
return -1;
}
| 4,727 |
qemu | 92c93a816a8c04071264f9fb47cbc90a5e1ae5d8 | 0 | static void ppc_spapr_init(ram_addr_t ram_size,
const char *boot_device,
const char *kernel_filename,
const char *kernel_cmdline,
const char *initrd_filename,
const char *cpu_model)
{
CPUState *env;
int i;
MemoryRegion *sysmem = get_system_memory();
MemoryRegion *ram = g_new(MemoryRegion, 1);
target_phys_addr_t rma_alloc_size, rma_size;
uint32_t initrd_base;
long kernel_size, initrd_size, fw_size;
long pteg_shift = 17;
char *filename;
spapr = g_malloc0(sizeof(*spapr));
QLIST_INIT(&spapr->phbs);
cpu_ppc_hypercall = emulate_spapr_hypercall;
/* Allocate RMA if necessary */
rma_alloc_size = kvmppc_alloc_rma("ppc_spapr.rma", sysmem);
if (rma_alloc_size == -1) {
hw_error("qemu: Unable to create RMA\n");
exit(1);
}
if (rma_alloc_size && (rma_alloc_size < ram_size)) {
rma_size = rma_alloc_size;
} else {
rma_size = ram_size;
}
/* We place the device tree just below either the top of the RMA,
* or just below 2GB, whichever is lower, so that it can be
* processed with 32-bit real mode code if necessary */
spapr->fdt_addr = MIN(rma_size, 0x80000000) - FDT_MAX_SIZE;
spapr->rtas_addr = spapr->fdt_addr - RTAS_MAX_SIZE;
/* init CPUs */
if (cpu_model == NULL) {
cpu_model = kvm_enabled() ? "host" : "POWER7";
}
for (i = 0; i < smp_cpus; i++) {
env = cpu_init(cpu_model);
if (!env) {
fprintf(stderr, "Unable to find PowerPC CPU definition\n");
exit(1);
}
/* Set time-base frequency to 512 MHz */
cpu_ppc_tb_init(env, TIMEBASE_FREQ);
qemu_register_reset((QEMUResetHandler *)&cpu_reset, env);
env->hreset_vector = 0x60;
env->hreset_excp_prefix = 0;
env->gpr[3] = env->cpu_index;
}
/* allocate RAM */
spapr->ram_limit = ram_size;
if (spapr->ram_limit > rma_alloc_size) {
ram_addr_t nonrma_base = rma_alloc_size;
ram_addr_t nonrma_size = spapr->ram_limit - rma_alloc_size;
memory_region_init_ram(ram, NULL, "ppc_spapr.ram", nonrma_size);
memory_region_add_subregion(sysmem, nonrma_base, ram);
}
/* allocate hash page table. For now we always make this 16mb,
* later we should probably make it scale to the size of guest
* RAM */
spapr->htab_size = 1ULL << (pteg_shift + 7);
spapr->htab = qemu_memalign(spapr->htab_size, spapr->htab_size);
for (env = first_cpu; env != NULL; env = env->next_cpu) {
env->external_htab = spapr->htab;
env->htab_base = -1;
env->htab_mask = spapr->htab_size - 1;
/* Tell KVM that we're in PAPR mode */
env->spr[SPR_SDR1] = (unsigned long)spapr->htab |
((pteg_shift + 7) - 18);
env->spr[SPR_HIOR] = 0;
if (kvm_enabled()) {
kvmppc_set_papr(env);
}
}
filename = qemu_find_file(QEMU_FILE_TYPE_BIOS, "spapr-rtas.bin");
spapr->rtas_size = load_image_targphys(filename, spapr->rtas_addr,
ram_size - spapr->rtas_addr);
if (spapr->rtas_size < 0) {
hw_error("qemu: could not load LPAR rtas '%s'\n", filename);
exit(1);
}
g_free(filename);
/* Set up Interrupt Controller */
spapr->icp = xics_system_init(XICS_IRQS);
spapr->next_irq = 16;
/* Set up VIO bus */
spapr->vio_bus = spapr_vio_bus_init();
for (i = 0; i < MAX_SERIAL_PORTS; i++) {
if (serial_hds[i]) {
spapr_vty_create(spapr->vio_bus, SPAPR_VTY_BASE_ADDRESS + i,
serial_hds[i]);
}
}
/* Set up PCI */
spapr_create_phb(spapr, "pci", SPAPR_PCI_BUID,
SPAPR_PCI_MEM_WIN_ADDR,
SPAPR_PCI_MEM_WIN_SIZE,
SPAPR_PCI_IO_WIN_ADDR);
for (i = 0; i < nb_nics; i++) {
NICInfo *nd = &nd_table[i];
if (!nd->model) {
nd->model = g_strdup("ibmveth");
}
if (strcmp(nd->model, "ibmveth") == 0) {
spapr_vlan_create(spapr->vio_bus, 0x1000 + i, nd);
} else {
pci_nic_init_nofail(&nd_table[i], nd->model, NULL);
}
}
for (i = 0; i <= drive_get_max_bus(IF_SCSI); i++) {
spapr_vscsi_create(spapr->vio_bus, 0x2000 + i);
}
if (kernel_filename) {
uint64_t lowaddr = 0;
kernel_size = load_elf(kernel_filename, translate_kernel_address, NULL,
NULL, &lowaddr, NULL, 1, ELF_MACHINE, 0);
if (kernel_size < 0) {
kernel_size = load_image_targphys(kernel_filename,
KERNEL_LOAD_ADDR,
ram_size - KERNEL_LOAD_ADDR);
}
if (kernel_size < 0) {
fprintf(stderr, "qemu: could not load kernel '%s'\n",
kernel_filename);
exit(1);
}
/* load initrd */
if (initrd_filename) {
initrd_base = INITRD_LOAD_ADDR;
initrd_size = load_image_targphys(initrd_filename, initrd_base,
ram_size - initrd_base);
if (initrd_size < 0) {
fprintf(stderr, "qemu: could not load initial ram disk '%s'\n",
initrd_filename);
exit(1);
}
} else {
initrd_base = 0;
initrd_size = 0;
}
spapr->entry_point = KERNEL_LOAD_ADDR;
} else {
if (ram_size < (MIN_RAM_SLOF << 20)) {
fprintf(stderr, "qemu: pSeries SLOF firmware requires >= "
"%ldM guest RAM\n", MIN_RAM_SLOF);
exit(1);
}
filename = qemu_find_file(QEMU_FILE_TYPE_BIOS, FW_FILE_NAME);
fw_size = load_image_targphys(filename, 0, FW_MAX_SIZE);
if (fw_size < 0) {
hw_error("qemu: could not load LPAR rtas '%s'\n", filename);
exit(1);
}
g_free(filename);
spapr->entry_point = 0x100;
initrd_base = 0;
initrd_size = 0;
/* SLOF will startup the secondary CPUs using RTAS,
rather than expecting a kexec() style entry */
for (env = first_cpu; env != NULL; env = env->next_cpu) {
env->halted = 1;
}
}
/* Prepare the device tree */
spapr->fdt_skel = spapr_create_fdt_skel(cpu_model, rma_size,
initrd_base, initrd_size,
boot_device, kernel_cmdline,
pteg_shift + 7);
assert(spapr->fdt_skel != NULL);
qemu_register_reset(spapr_reset, spapr);
}
| 4,728 |
FFmpeg | 747a0554ea8ad09404c1f5b80239ebd8d71b291e | 1 | static int swf_write_video(AVFormatContext *s,
AVCodecContext *enc, const uint8_t *buf, int size)
{
ByteIOContext *pb = &s->pb;
static int tag_id = 0;
if (enc->frame_number > 1) {
/* remove the shape */
put_swf_tag(s, TAG_REMOVEOBJECT);
put_le16(pb, SHAPE_ID); /* shape ID */
put_le16(pb, 1); /* depth */
put_swf_end_tag(s);
/* free the bitmap */
put_swf_tag(s, TAG_FREECHARACTER);
put_le16(pb, BITMAP_ID);
put_swf_end_tag(s);
}
put_swf_tag(s, TAG_JPEG2 | TAG_LONG);
put_le16(pb, tag_id); /* ID of the image */
/* a dummy jpeg header seems to be required */
put_byte(pb, 0xff);
put_byte(pb, 0xd8);
put_byte(pb, 0xff);
put_byte(pb, 0xd9);
/* write the jpeg image */
put_buffer(pb, buf, size);
put_swf_end_tag(s);
/* draw the shape */
put_swf_tag(s, TAG_PLACEOBJECT);
put_le16(pb, SHAPE_ID); /* shape ID */
put_le16(pb, 1); /* depth */
put_swf_matrix(pb, 1 << FRAC_BITS, 0, 0, 1 << FRAC_BITS, 0, 0);
put_swf_end_tag(s);
/* output the frame */
put_swf_tag(s, TAG_SHOWFRAME);
put_swf_end_tag(s);
put_flush_packet(&s->pb);
return 0;
}
| 4,730 |
qemu | 4ed1c57a64992d84376b446b0c60edff2486681b | 1 | static int ehci_state_writeback(EHCIQueue *q)
{
EHCIPacket *p = QTAILQ_FIRST(&q->packets);
int again = 0;
/* Write back the QTD from the QH area */
assert(p != NULL);
assert(p->qtdaddr == q->qtdaddr);
ehci_trace_qtd(q, NLPTR_GET(p->qtdaddr), (EHCIqtd *) &q->qh.next_qtd);
put_dwords(q->ehci, NLPTR_GET(p->qtdaddr), (uint32_t *) &q->qh.next_qtd,
sizeof(EHCIqtd) >> 2);
ehci_free_packet(p);
/*
* EHCI specs say go horizontal here.
*
* We can also advance the queue here for performance reasons. We
* need to take care to only take that shortcut in case we've
* processed the qtd just written back without errors, i.e. halt
* bit is clear.
*/
if (q->qh.token & QTD_TOKEN_HALT) {
ehci_set_state(q->ehci, q->async, EST_HORIZONTALQH);
again = 1;
} else {
ehci_set_state(q->ehci, q->async, EST_ADVANCEQUEUE);
again = 1;
}
return again;
}
| 4,731 |
FFmpeg | a9f3bb14ba8b303cf87c42b8fe7e423571176d54 | 1 | static int mov_skip_multiple_stsd(MOVContext *c, AVIOContext *pb,
int codec_tag, int format,
int size)
{
int video_codec_id = ff_codec_get_id(ff_codec_movvideo_tags, format);
if (codec_tag &&
(codec_tag != format &&
(c->fc->video_codec_id ? video_codec_id != c->fc->video_codec_id
: codec_tag != MKTAG('j','p','e','g')))) {
/* Multiple fourcc, we skip JPEG. This is not correct, we should
* export it as a separate AVStream but this needs a few changes
* in the MOV demuxer, patch welcome. */
av_log(c->fc, AV_LOG_WARNING, "multiple fourcc not supported\n");
avio_skip(pb, size);
return 1;
}
if ( codec_tag == AV_RL32("avc1") ||
codec_tag == AV_RL32("hvc1") ||
codec_tag == AV_RL32("hev1")
)
av_log(c->fc, AV_LOG_WARNING, "Concatenated H.264 or H.265 might not play correctly.\n");
return 0;
}
| 4,732 |
qemu | 7843c0d60db694b6d97e14ec5538fb97424016c1 | 1 | static uint32_t cas_check_pvr(PowerPCCPU *cpu, target_ulong *addr,
Error **errp)
{
bool explicit_match = false; /* Matched the CPU's real PVR */
uint32_t max_compat = cpu->max_compat;
uint32_t best_compat = 0;
int i;
/*
* We scan the supplied table of PVRs looking for two things
* 1. Is our real CPU PVR in the list?
* 2. What's the "best" listed logical PVR
*/
for (i = 0; i < 512; ++i) {
uint32_t pvr, pvr_mask;
pvr_mask = ldl_be_phys(&address_space_memory, *addr);
pvr = ldl_be_phys(&address_space_memory, *addr + 4);
*addr += 8;
if (~pvr_mask & pvr) {
break; /* Terminator record */
}
if ((cpu->env.spr[SPR_PVR] & pvr_mask) == (pvr & pvr_mask)) {
explicit_match = true;
} else {
if (ppc_check_compat(cpu, pvr, best_compat, max_compat)) {
best_compat = pvr;
}
}
}
if ((best_compat == 0) && (!explicit_match || max_compat)) {
/* We couldn't find a suitable compatibility mode, and either
* the guest doesn't support "raw" mode for this CPU, or raw
* mode is disabled because a maximum compat mode is set */
error_setg(errp, "Couldn't negotiate a suitable PVR during CAS");
return 0;
}
/* Parsing finished */
trace_spapr_cas_pvr(cpu->compat_pvr, explicit_match, best_compat);
return best_compat;
}
| 4,733 |
FFmpeg | b505f15b1530d72682b3314e84936f80fe6e43b2 | 1 | void ff_slice_thread_free(AVCodecContext *avctx)
{
SliceThreadContext *c = avctx->internal->thread_ctx;
int i;
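/* mark the pool as finished and wake every worker so it can exit before the threads are joined */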
pthread_mutex_lock(&c->current_job_lock);
c->done = 1;
pthread_cond_broadcast(&c->current_job_cond);
for (i = 0; i < c->thread_count; i++)
pthread_cond_broadcast(&c->progress_cond[i]);
pthread_mutex_unlock(&c->current_job_lock);
for (i=0; i<avctx->thread_count; i++)
pthread_join(c->workers[i], NULL);
for (i = 0; i < c->thread_count; i++) {
pthread_mutex_destroy(&c->progress_mutex[i]);
pthread_cond_destroy(&c->progress_cond[i]);
}
pthread_mutex_destroy(&c->current_job_lock);
pthread_cond_destroy(&c->current_job_cond);
pthread_cond_destroy(&c->last_job_cond);
av_freep(&c->entries);
av_freep(&c->progress_mutex);
av_freep(&c->progress_cond);
av_freep(&c->workers);
av_freep(&avctx->internal->thread_ctx);
}
| 4,735 |
FFmpeg | fc78b0cb7e115ae494861c37a9928cff74df8db9 | 1 | RTPDemuxContext *rtp_parse_open(AVFormatContext *s1, AVStream *st, URLContext *rtpc, int payload_type, RTPPayloadData *rtp_payload_data)
{
RTPDemuxContext *s;
s = av_mallocz(sizeof(RTPDemuxContext));
if (!s)
return NULL;
s->payload_type = payload_type;
s->last_rtcp_ntp_time = AV_NOPTS_VALUE;
s->first_rtcp_ntp_time = AV_NOPTS_VALUE;
s->ic = s1;
s->st = st;
s->rtp_payload_data = rtp_payload_data;
rtp_init_statistics(&s->statistics, 0); // do we know the initial sequence from sdp?
if (!strcmp(ff_rtp_enc_name(payload_type), "MP2T")) {
s->ts = ff_mpegts_parse_open(s->ic);
if (s->ts == NULL) {
av_free(s);
return NULL;
}
} else {
av_set_pts_info(st, 32, 1, 90000);
switch(st->codec->codec_id) {
case CODEC_ID_MPEG1VIDEO:
case CODEC_ID_MPEG2VIDEO:
case CODEC_ID_MP2:
case CODEC_ID_MP3:
case CODEC_ID_MPEG4:
case CODEC_ID_H263:
case CODEC_ID_H264:
st->need_parsing = AVSTREAM_PARSE_FULL;
break;
default:
if (st->codec->codec_type == CODEC_TYPE_AUDIO) {
av_set_pts_info(st, 32, 1, st->codec->sample_rate);
}
break;
}
}
// needed to send back RTCP RR in RTSP sessions
s->rtp_ctx = rtpc;
gethostname(s->hostname, sizeof(s->hostname));
return s;
}
| 4,736 |
FFmpeg | 5eb765ef341c3ec1bea31914c897750f88476ede | 1 | static int http_send_data(HTTPContext *c, long cur_time)
{
int len, ret;
while (c->buffer_ptr >= c->buffer_end) {
ret = http_prepare_data(c, cur_time);
if (ret < 0)
return -1;
else if (ret == 0) {
continue;
} else {
/* state change requested */
return 0;
}
}
if (c->buffer_end > c->buffer_ptr) {
len = write(c->fd, c->buffer_ptr, c->buffer_end - c->buffer_ptr);
if (len < 0) {
if (errno != EAGAIN && errno != EINTR) {
/* error : close connection */
return -1;
}
} else {
c->buffer_ptr += len;
c->data_count += len;
if (c->stream)
c->stream->bytes_served += len;
}
}
return 0;
}
| 4,737 |
qemu | 5e8e3c4c75c199aa1017db816fca02be2a9f8798 | 1 | static void virgl_cmd_resource_unref(VirtIOGPU *g,
struct virtio_gpu_ctrl_command *cmd)
{
struct virtio_gpu_resource_unref unref;
VIRTIO_GPU_FILL_CMD(unref);
trace_virtio_gpu_cmd_res_unref(unref.resource_id);
virgl_renderer_resource_unref(unref.resource_id);
}
| 4,738 |
qemu | 0b8b8753e4d94901627b3e86431230f2319215c4 | 1 | static void perf_yield(void)
{
unsigned int i, maxcycles;
double duration;
maxcycles = 100000000;
i = maxcycles;
Coroutine *coroutine = qemu_coroutine_create(yield_loop);
g_test_timer_start();
while (i > 0) {
qemu_coroutine_enter(coroutine, &i);
}
duration = g_test_timer_elapsed();
g_test_message("Yield %u iterations: %f s\n",
maxcycles, duration);
}
| 4,739 |
FFmpeg | f6774f905fb3cfdc319523ac640be30b14c1bc55 | 1 | static void fill_picture_parameters(AVCodecContext *avctx,
struct dxva_context *ctx,
const struct MpegEncContext *s,
DXVA_PictureParameters *pp)
{
const Picture *current_picture = s->current_picture_ptr;
int is_field = s->picture_structure != PICT_FRAME;
memset(pp, 0, sizeof(*pp));
pp->wDecodedPictureIndex = ff_dxva2_get_surface_index(ctx, ¤t_picture->f);
pp->wDeblockedPictureIndex = 0;
if (s->pict_type != AV_PICTURE_TYPE_I)
pp->wForwardRefPictureIndex = ff_dxva2_get_surface_index(ctx, &s->last_picture.f);
else
pp->wForwardRefPictureIndex = 0xffff;
if (s->pict_type == AV_PICTURE_TYPE_B)
pp->wBackwardRefPictureIndex = ff_dxva2_get_surface_index(ctx, &s->next_picture.f);
else
pp->wBackwardRefPictureIndex = 0xffff;
pp->wPicWidthInMBminus1 = s->mb_width - 1;
pp->wPicHeightInMBminus1 = (s->mb_height >> is_field) - 1;
pp->bMacroblockWidthMinus1 = 15;
pp->bMacroblockHeightMinus1 = 15;
pp->bBlockWidthMinus1 = 7;
pp->bBlockHeightMinus1 = 7;
pp->bBPPminus1 = 7;
pp->bPicStructure = s->picture_structure;
pp->bSecondField = is_field && !s->first_field;
pp->bPicIntra = s->pict_type == AV_PICTURE_TYPE_I;
pp->bPicBackwardPrediction = s->pict_type == AV_PICTURE_TYPE_B;
pp->bBidirectionalAveragingMode = 0;
pp->bMVprecisionAndChromaRelation= 0; /* FIXME */
pp->bChromaFormat = s->chroma_format;
pp->bPicScanFixed = 1;
pp->bPicScanMethod = s->alternate_scan ? 1 : 0;
pp->bPicReadbackRequests = 0;
pp->bRcontrol = 0;
pp->bPicSpatialResid8 = 0;
pp->bPicOverflowBlocks = 0;
pp->bPicExtrapolation = 0;
pp->bPicDeblocked = 0;
pp->bPicDeblockConfined = 0;
pp->bPic4MVallowed = 0;
pp->bPicOBMC = 0;
pp->bPicBinPB = 0;
pp->bMV_RPS = 0;
pp->bReservedBits = 0;
pp->wBitstreamFcodes = (s->mpeg_f_code[0][0] << 12) |
(s->mpeg_f_code[0][1] << 8) |
(s->mpeg_f_code[1][0] << 4) |
(s->mpeg_f_code[1][1] );
pp->wBitstreamPCEelements = (s->intra_dc_precision << 14) |
(s->picture_structure << 12) |
(s->top_field_first << 11) |
(s->frame_pred_frame_dct << 10) |
(s->concealment_motion_vectors << 9) |
(s->q_scale_type << 8) |
(s->intra_vlc_format << 7) |
(s->alternate_scan << 6) |
(s->repeat_first_field << 5) |
(s->chroma_420_type << 4) |
(s->progressive_frame << 3);
pp->bBitstreamConcealmentNeed = 0;
pp->bBitstreamConcealmentMethod = 0;
}
| 4,740 |
qemu | 6d0ee85040e4d238e2483191fe6e74aebbecd5d5 | 1 | static int virtio_balloon_load(QEMUFile *f, void *opaque, int version_id)
{
VirtIOBalloon *s = opaque;
if (version_id != 1)
return -EINVAL;
virtio_load(&s->vdev, f);
s->num_pages = qemu_get_be32(f);
s->actual = qemu_get_be32(f);
qemu_get_buffer(f, (uint8_t *)&s->stats_vq_elem, sizeof(VirtQueueElement));
qemu_get_buffer(f, (uint8_t *)&s->stats_vq_offset, sizeof(size_t));
qemu_get_buffer(f, (uint8_t *)&s->stats_callback, sizeof(MonitorCompletion));
qemu_get_buffer(f, (uint8_t *)&s->stats_opaque_callback_data, sizeof(void));
return 0;
}
| 4,741 |
qemu | 2ccf97ec0f1b7a62a3220064f305454f3932c55a | 1 | static inline void init_thread(struct target_pt_regs *_regs, struct image_info *infop)
{
_regs->gpr[1] = infop->start_stack;
#if defined(TARGET_PPC64) && !defined(TARGET_ABI32)
if (get_ppc64_abi(infop) < 2) {
_regs->gpr[2] = ldq_raw(infop->entry + 8) + infop->load_bias;
infop->entry = ldq_raw(infop->entry) + infop->load_bias;
} else {
_regs->gpr[12] = infop->entry; /* r12 set to global entry address */
}
#endif
_regs->nip = infop->entry;
}
| 4,742 |
FFmpeg | b71528d8967c1e10f499432fe26ff4713d1fd3b6 | 1 | static int mov_read_custom_2plus(MOVContext *c, AVIOContext *pb, int size)
{
int64_t end = avio_tell(pb) + size;
uint8_t *key = NULL, *val = NULL;
int i;
AVStream *st;
MOVStreamContext *sc;
if (c->fc->nb_streams < 1)
return 0;
st = c->fc->streams[c->fc->nb_streams-1];
sc = st->priv_data;
for (i = 0; i < 2; i++) {
uint8_t **p;
uint32_t len, tag;
if (end - avio_tell(pb) <= 12)
break;
len = avio_rb32(pb);
tag = avio_rl32(pb);
avio_skip(pb, 4); // flags
if (len < 12 || len - 12 > end - avio_tell(pb))
break;
len -= 12;
if (tag == MKTAG('n', 'a', 'm', 'e'))
p = &key;
else if (tag == MKTAG('d', 'a', 't', 'a') && len > 4) {
avio_skip(pb, 4);
len -= 4;
p = &val;
} else
break;
*p = av_malloc(len + 1);
if (!*p)
break;
avio_read(pb, *p, len);
(*p)[len] = 0;
}
if (key && val) {
if (strcmp(key, "iTunSMPB") == 0) {
int priming, remainder, samples;
if(sscanf(val, "%*X %X %X %X", &priming, &remainder, &samples) == 3){
if(priming>0 && priming<16384)
sc->start_pad = priming;
}
}
if (strcmp(key, "cdec") != 0) {
av_dict_set(&c->fc->metadata, key, val,
AV_DICT_DONT_STRDUP_KEY | AV_DICT_DONT_STRDUP_VAL);
key = val = NULL;
}
}
avio_seek(pb, end, SEEK_SET);
av_freep(&key);
av_freep(&val);
return 0;
}
| 4,743 |
qemu | 4f4321c11ff6e98583846bfd6f0e81954924b003 | 1 | static void softusb_usbdev_datain(void *opaque)
{
MilkymistSoftUsbState *s = opaque;
USBPacket p;
p.pid = USB_TOKEN_IN;
p.devep = 1;
p.data = s->kbd_usb_buffer;
p.len = sizeof(s->kbd_usb_buffer);
s->usbdev->info->handle_data(s->usbdev, &p);
softusb_kbd_changed(s);
}
| 4,744 |
FFmpeg | 95e5323510aa9526b7771d20deadc80c19f215eb | 0 | static int a52_decode_init(AVCodecContext *avctx)
{
AC3DecodeState *s = avctx->priv_data;
#ifdef CONFIG_LIBA52BIN
s->handle = dlopen(liba52name, RTLD_LAZY);
if (!s->handle)
{
av_log( avctx, AV_LOG_ERROR, "A52 library %s could not be opened! \n%s\n", liba52name, dlerror());
return -1;
}
s->a52_init = (a52_state_t* (*)(uint32_t)) dlsymm(s->handle, "a52_init");
s->a52_samples = (sample_t* (*)(a52_state_t*)) dlsymm(s->handle, "a52_samples");
s->a52_syncinfo = (int (*)(uint8_t*, int*, int*, int*)) dlsymm(s->handle, "a52_syncinfo");
s->a52_frame = (int (*)(a52_state_t*, uint8_t*, int*, sample_t*, sample_t)) dlsymm(s->handle, "a52_frame");
s->a52_block = (int (*)(a52_state_t*)) dlsymm(s->handle, "a52_block");
s->a52_free = (void (*)(a52_state_t*)) dlsymm(s->handle, "a52_free");
if (!s->a52_init || !s->a52_samples || !s->a52_syncinfo
|| !s->a52_frame || !s->a52_block || !s->a52_free)
{
dlclose(s->handle);
return -1;
}
#else
s->handle = 0;
s->a52_init = a52_init;
s->a52_samples = a52_samples;
s->a52_syncinfo = a52_syncinfo;
s->a52_frame = a52_frame;
s->a52_block = a52_block;
s->a52_free = a52_free;
#endif
s->state = s->a52_init(0); /* later use CPU flags */
s->samples = s->a52_samples(s->state);
s->inbuf_ptr = s->inbuf;
s->frame_size = 0;
/* allow downmixing to stereo or mono */
if (avctx->channels > 0 && avctx->request_channels > 0 &&
avctx->request_channels < avctx->channels &&
avctx->request_channels <= 2) {
avctx->channels = avctx->request_channels;
}
return 0;
}
| 4,745 |
qemu | 8daea510951dd309a44cea8de415c685c43851cf | 1 | static void release_drive(Object *obj, const char *name, void *opaque)
{
DeviceState *dev = DEVICE(obj);
Property *prop = opaque;
BlockBackend **ptr = qdev_get_prop_ptr(dev, prop);
if (*ptr) {
blk_detach_dev(*ptr, dev);
blockdev_auto_del(*ptr);
}
}
| 4,746 |
FFmpeg | 88e7a4d18c63799a21dff4a570ceb8008e310820 | 1 | static inline int decode_picture_parameter_set(H264Context *h, int bit_length){
MpegEncContext * const s = &h->s;
unsigned int pps_id= get_ue_golomb(&s->gb);
PPS *pps;
if(pps_id>=MAX_PPS_COUNT){
av_log(h->s.avctx, AV_LOG_ERROR, "pps_id out of range\n");
return -1;
}
pps = &h->pps_buffer[pps_id];
pps->sps_id= get_ue_golomb(&s->gb);
pps->cabac= get_bits1(&s->gb);
pps->pic_order_present= get_bits1(&s->gb);
pps->slice_group_count= get_ue_golomb(&s->gb) + 1;
if(pps->slice_group_count > 1 ){
pps->mb_slice_group_map_type= get_ue_golomb(&s->gb);
av_log(h->s.avctx, AV_LOG_ERROR, "FMO not supported\n");
switch(pps->mb_slice_group_map_type){
case 0:
#if 0
| for( i = 0; i <= num_slice_groups_minus1; i++ ) | | |
| run_length[ i ] |1 |ue(v) |
#endif
break;
case 2:
#if 0
| for( i = 0; i < num_slice_groups_minus1; i++ ) | | |
|{ | | |
| top_left_mb[ i ] |1 |ue(v) |
| bottom_right_mb[ i ] |1 |ue(v) |
| } | | |
#endif
break;
case 3:
case 4:
case 5:
#if 0
| slice_group_change_direction_flag |1 |u(1) |
| slice_group_change_rate_minus1 |1 |ue(v) |
#endif
break;
case 6:
#if 0
| slice_group_id_cnt_minus1 |1 |ue(v) |
| for( i = 0; i <= slice_group_id_cnt_minus1; i++ | | |
|) | | |
| slice_group_id[ i ] |1 |u(v) |
#endif
break;
}
}
pps->ref_count[0]= get_ue_golomb(&s->gb) + 1;
pps->ref_count[1]= get_ue_golomb(&s->gb) + 1;
if(pps->ref_count[0] > 32 || pps->ref_count[1] > 32){
av_log(h->s.avctx, AV_LOG_ERROR, "reference overflow (pps)\n");
return -1;
}
pps->weighted_pred= get_bits1(&s->gb);
pps->weighted_bipred_idc= get_bits(&s->gb, 2);
pps->init_qp= get_se_golomb(&s->gb) + 26;
pps->init_qs= get_se_golomb(&s->gb) + 26;
pps->chroma_qp_index_offset= get_se_golomb(&s->gb);
pps->deblocking_filter_parameters_present= get_bits1(&s->gb);
pps->constrained_intra_pred= get_bits1(&s->gb);
pps->redundant_pic_cnt_present = get_bits1(&s->gb);
pps->transform_8x8_mode= 0;
h->dequant_coeff_pps= -1; //contents of sps/pps can change even if id doesn't, so reinit
memset(pps->scaling_matrix4, 16, 6*16*sizeof(uint8_t));
memset(pps->scaling_matrix8, 16, 2*64*sizeof(uint8_t));
if(get_bits_count(&s->gb) < bit_length){
pps->transform_8x8_mode= get_bits1(&s->gb);
decode_scaling_matrices(h, &h->sps_buffer[pps->sps_id], pps, 0, pps->scaling_matrix4, pps->scaling_matrix8);
get_se_golomb(&s->gb); //second_chroma_qp_index_offset
}
if(s->avctx->debug&FF_DEBUG_PICT_INFO){
av_log(h->s.avctx, AV_LOG_DEBUG, "pps:%u sps:%u %s slice_groups:%d ref:%d/%d %s qp:%d/%d/%d %s %s %s %s\n",
pps_id, pps->sps_id,
pps->cabac ? "CABAC" : "CAVLC",
pps->slice_group_count,
pps->ref_count[0], pps->ref_count[1],
pps->weighted_pred ? "weighted" : "",
pps->init_qp, pps->init_qs, pps->chroma_qp_index_offset,
pps->deblocking_filter_parameters_present ? "LPAR" : "",
pps->constrained_intra_pred ? "CONSTR" : "",
pps->redundant_pic_cnt_present ? "REDU" : "",
pps->transform_8x8_mode ? "8x8DCT" : ""
);
}
return 0;
}
| 4,747 |
qemu | 66b9b43c42049bcae37668e890fedde9a72c8167 | 1 | static uint64_t watch_mem_read(void *opaque, hwaddr addr,
unsigned size)
{
check_watchpoint(addr & ~TARGET_PAGE_MASK, size, BP_MEM_READ);
switch (size) {
case 1: return ldub_phys(&address_space_memory, addr);
case 2: return lduw_phys(&address_space_memory, addr);
case 4: return ldl_phys(&address_space_memory, addr);
default: abort();
}
}
| 4,748 |
FFmpeg | 6ba5cbc699e77cae66bb719354fa142114b64eab | 0 | static int tcp_open(URLContext *h, const char *uri, int flags)
{
struct sockaddr_in dest_addr;
char hostname[1024], *q;
int port, fd = -1;
TCPContext *s;
const char *p;
fd_set wfds;
int fd_max, ret;
struct timeval tv;
socklen_t optlen;
s = av_malloc(sizeof(TCPContext));
if (!s)
return -ENOMEM;
h->priv_data = s;
p = uri;
if (!strstart(p, "tcp://", &p))
goto fail;
q = hostname;
while (*p != ':' && *p != '/' && *p != '\0') {
if ((q - hostname) < sizeof(hostname) - 1)
*q++ = *p;
p++;
}
*q = '\0';
if (*p != ':')
goto fail;
p++;
port = strtoul(p, (char **)&p, 10);
if (port <= 0 || port >= 65536)
goto fail;
dest_addr.sin_family = AF_INET;
dest_addr.sin_port = htons(port);
if (resolve_host(&dest_addr.sin_addr, hostname) < 0)
goto fail;
fd = socket(PF_INET, SOCK_STREAM, 0);
if (fd < 0)
goto fail;
fcntl(fd, F_SETFL, O_NONBLOCK);
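/* non-blocking connect: if it cannot complete immediately, the select() loop below waits for writability while polling url_interrupt_cb() */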
redo:
ret = connect(fd, (struct sockaddr *)&dest_addr,
sizeof(dest_addr));
if (ret < 0) {
if (errno == EINTR)
goto redo;
if (errno != EINPROGRESS)
goto fail;
/* wait until we are connected or until abort */
for(;;) {
if (url_interrupt_cb()) {
ret = -EINTR;
goto fail1;
}
fd_max = fd;
FD_ZERO(&wfds);
FD_SET(fd, &wfds);
tv.tv_sec = 0;
tv.tv_usec = 100 * 1000;
ret = select(fd_max + 1, NULL, &wfds, NULL, &tv);
if (ret > 0 && FD_ISSET(fd, &wfds))
break;
}
/* test error */
optlen = sizeof(ret);
getsockopt (fd, SOL_SOCKET, SO_ERROR, &ret, &optlen);
if (ret != 0)
goto fail;
}
s->fd = fd;
return 0;
fail:
ret = AVERROR_IO;
fail1:
if (fd >= 0)
close(fd);
av_free(s);
return ret;
}
| 4,749 |
FFmpeg | 22de0f8369f1f3edf1a55e1d275f3c07c617b53e | 0 | static int mov_write_mvhd_tag(AVIOContext *pb, MOVMuxContext *mov)
{
int max_track_id = 1, i;
int64_t max_track_len_temp, max_track_len = 0;
int version;
for (i = 0; i < mov->nb_streams; i++) {
if (mov->tracks[i].entry > 0) {
max_track_len_temp = av_rescale_rnd(mov->tracks[i].track_duration,
MOV_TIMESCALE,
mov->tracks[i].timescale,
AV_ROUND_UP);
if (max_track_len < max_track_len_temp)
max_track_len = max_track_len_temp;
if (max_track_id < mov->tracks[i].track_id)
max_track_id = mov->tracks[i].track_id;
}
}
version = max_track_len < UINT32_MAX ? 0 : 1;
(version == 1) ? avio_wb32(pb, 120) : avio_wb32(pb, 108); /* size */
ffio_wfourcc(pb, "mvhd");
avio_w8(pb, version);
avio_wb24(pb, 0); /* flags */
if (version == 1) {
avio_wb64(pb, mov->time);
avio_wb64(pb, mov->time);
} else {
avio_wb32(pb, mov->time); /* creation time */
avio_wb32(pb, mov->time); /* modification time */
}
avio_wb32(pb, MOV_TIMESCALE);
(version == 1) ? avio_wb64(pb, max_track_len) : avio_wb32(pb, max_track_len); /* duration of longest track */
avio_wb32(pb, 0x00010000); /* reserved (preferred rate) 1.0 = normal */
avio_wb16(pb, 0x0100); /* reserved (preferred volume) 1.0 = normal */
avio_wb16(pb, 0); /* reserved */
avio_wb32(pb, 0); /* reserved */
avio_wb32(pb, 0); /* reserved */
/* Matrix structure */
avio_wb32(pb, 0x00010000); /* reserved */
avio_wb32(pb, 0x0); /* reserved */
avio_wb32(pb, 0x0); /* reserved */
avio_wb32(pb, 0x0); /* reserved */
avio_wb32(pb, 0x00010000); /* reserved */
avio_wb32(pb, 0x0); /* reserved */
avio_wb32(pb, 0x0); /* reserved */
avio_wb32(pb, 0x0); /* reserved */
avio_wb32(pb, 0x40000000); /* reserved */
avio_wb32(pb, 0); /* reserved (preview time) */
avio_wb32(pb, 0); /* reserved (preview duration) */
avio_wb32(pb, 0); /* reserved (poster time) */
avio_wb32(pb, 0); /* reserved (selection time) */
avio_wb32(pb, 0); /* reserved (selection duration) */
avio_wb32(pb, 0); /* reserved (current time) */
avio_wb32(pb, max_track_id + 1); /* Next track id */
return 0x6c;
}
| 4,750 |
FFmpeg | 689f65126be8a55e8a1e706cb56b19bb975c20ce | 0 | static inline void idct4col_put(uint8_t *dest, int line_size, const DCTELEM *col)
{
int c0, c1, c2, c3, a0, a1, a2, a3;
const uint8_t *cm = ff_cropTbl + MAX_NEG_CROP;
a0 = col[8*0];
a1 = col[8*2];
a2 = col[8*4];
a3 = col[8*6];
c0 = ((a0 + a2) << (CN_SHIFT - 1)) + (1 << (C_SHIFT - 1));
c2 = ((a0 - a2) << (CN_SHIFT - 1)) + (1 << (C_SHIFT - 1));
c1 = a1 * C1 + a3 * C2;
c3 = a1 * C2 - a3 * C1;
dest[0] = cm[(c0 + c1) >> C_SHIFT];
dest += line_size;
dest[0] = cm[(c2 + c3) >> C_SHIFT];
dest += line_size;
dest[0] = cm[(c2 - c3) >> C_SHIFT];
dest += line_size;
dest[0] = cm[(c0 - c1) >> C_SHIFT];
}
| 4,751 |
FFmpeg | b3f461508176236bd9d7c2762fd70e339b1494f8 | 0 | static void print_final_stats(int64_t total_size)
{
uint64_t video_size = 0, audio_size = 0, extra_size = 0, other_size = 0;
uint64_t subtitle_size = 0;
uint64_t data_size = 0;
float percent = -1.0;
int i, j;
for (i = 0; i < nb_output_streams; i++) {
OutputStream *ost = output_streams[i];
switch (ost->st->codec->codec_type) {
case AVMEDIA_TYPE_VIDEO: video_size += ost->data_size; break;
case AVMEDIA_TYPE_AUDIO: audio_size += ost->data_size; break;
case AVMEDIA_TYPE_SUBTITLE: subtitle_size += ost->data_size; break;
default: other_size += ost->data_size; break;
}
extra_size += ost->st->codec->extradata_size;
data_size += ost->data_size;
}
if (data_size && total_size >= data_size)
percent = 100.0 * (total_size - data_size) / data_size;
av_log(NULL, AV_LOG_INFO, "\n");
av_log(NULL, AV_LOG_INFO, "video:%1.0fkB audio:%1.0fkB subtitle:%1.0fkB other streams:%1.0fkB global headers:%1.0fkB muxing overhead: ",
video_size / 1024.0,
audio_size / 1024.0,
subtitle_size / 1024.0,
other_size / 1024.0,
extra_size / 1024.0);
if (percent >= 0.0)
av_log(NULL, AV_LOG_INFO, "%f%%", percent);
else
av_log(NULL, AV_LOG_INFO, "unknown");
av_log(NULL, AV_LOG_INFO, "\n");
/* print verbose per-stream stats */
for (i = 0; i < nb_input_files; i++) {
InputFile *f = input_files[i];
uint64_t total_packets = 0, total_size = 0;
av_log(NULL, AV_LOG_VERBOSE, "Input file #%d (%s):\n",
i, f->ctx->filename);
for (j = 0; j < f->nb_streams; j++) {
InputStream *ist = input_streams[f->ist_index + j];
enum AVMediaType type = ist->st->codec->codec_type;
total_size += ist->data_size;
total_packets += ist->nb_packets;
av_log(NULL, AV_LOG_VERBOSE, " Input stream #%d:%d (%s): ",
i, j, media_type_string(type));
av_log(NULL, AV_LOG_VERBOSE, "%"PRIu64" packets read (%"PRIu64" bytes); ",
ist->nb_packets, ist->data_size);
if (ist->decoding_needed) {
av_log(NULL, AV_LOG_VERBOSE, "%"PRIu64" frames decoded",
ist->frames_decoded);
if (type == AVMEDIA_TYPE_AUDIO)
av_log(NULL, AV_LOG_VERBOSE, " (%"PRIu64" samples)", ist->samples_decoded);
av_log(NULL, AV_LOG_VERBOSE, "; ");
}
av_log(NULL, AV_LOG_VERBOSE, "\n");
}
av_log(NULL, AV_LOG_VERBOSE, " Total: %"PRIu64" packets (%"PRIu64" bytes) demuxed\n",
total_packets, total_size);
}
for (i = 0; i < nb_output_files; i++) {
OutputFile *of = output_files[i];
uint64_t total_packets = 0, total_size = 0;
av_log(NULL, AV_LOG_VERBOSE, "Output file #%d (%s):\n",
i, of->ctx->filename);
for (j = 0; j < of->ctx->nb_streams; j++) {
OutputStream *ost = output_streams[of->ost_index + j];
enum AVMediaType type = ost->st->codec->codec_type;
total_size += ost->data_size;
total_packets += ost->packets_written;
av_log(NULL, AV_LOG_VERBOSE, " Output stream #%d:%d (%s): ",
i, j, media_type_string(type));
if (ost->encoding_needed) {
av_log(NULL, AV_LOG_VERBOSE, "%"PRIu64" frames encoded",
ost->frames_encoded);
if (type == AVMEDIA_TYPE_AUDIO)
av_log(NULL, AV_LOG_VERBOSE, " (%"PRIu64" samples)", ost->samples_encoded);
av_log(NULL, AV_LOG_VERBOSE, "; ");
}
av_log(NULL, AV_LOG_VERBOSE, "%"PRIu64" packets muxed (%"PRIu64" bytes); ",
ost->packets_written, ost->data_size);
av_log(NULL, AV_LOG_VERBOSE, "\n");
}
av_log(NULL, AV_LOG_VERBOSE, " Total: %"PRIu64" packets (%"PRIu64" bytes) muxed\n",
total_packets, total_size);
}
if(video_size + data_size + audio_size + subtitle_size + extra_size == 0){
av_log(NULL, AV_LOG_WARNING, "Output file is empty, nothing was encoded (check -ss / -t / -frames parameters if used)\n");
}
}
| 4,752 |
FFmpeg | 58b1cba0c9173741cf769117a735b429356d83c0 | 1 | static void read_sbr_channel_pair_element(AACContext *ac,
SpectralBandReplication *sbr,
GetBitContext *gb)
{
if (get_bits1(gb)) // bs_data_extra
skip_bits(gb, 8); // bs_reserved
if ((sbr->bs_coupling = get_bits1(gb))) {
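/* coupling: the right channel reuses the left channel's time/frequency grid and inverse-filtering modes; envelopes and noise floors are still read for both channels */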
read_sbr_grid(ac, sbr, gb, &sbr->data[0]);
copy_sbr_grid(&sbr->data[1], &sbr->data[0]);
read_sbr_dtdf(sbr, gb, &sbr->data[0]);
read_sbr_dtdf(sbr, gb, &sbr->data[1]);
read_sbr_invf(sbr, gb, &sbr->data[0]);
memcpy(sbr->data[1].bs_invf_mode[1], sbr->data[1].bs_invf_mode[0], sizeof(sbr->data[1].bs_invf_mode[0]));
memcpy(sbr->data[1].bs_invf_mode[0], sbr->data[0].bs_invf_mode[0], sizeof(sbr->data[1].bs_invf_mode[0]));
read_sbr_envelope(sbr, gb, &sbr->data[0], 0);
read_sbr_noise(sbr, gb, &sbr->data[0], 0);
read_sbr_envelope(sbr, gb, &sbr->data[1], 1);
read_sbr_noise(sbr, gb, &sbr->data[1], 1);
} else {
read_sbr_grid(ac, sbr, gb, &sbr->data[0]);
read_sbr_grid(ac, sbr, gb, &sbr->data[1]);
read_sbr_dtdf(sbr, gb, &sbr->data[0]);
read_sbr_dtdf(sbr, gb, &sbr->data[1]);
read_sbr_invf(sbr, gb, &sbr->data[0]);
read_sbr_invf(sbr, gb, &sbr->data[1]);
read_sbr_envelope(sbr, gb, &sbr->data[0], 0);
read_sbr_envelope(sbr, gb, &sbr->data[1], 1);
read_sbr_noise(sbr, gb, &sbr->data[0], 0);
read_sbr_noise(sbr, gb, &sbr->data[1], 1);
}
if ((sbr->data[0].bs_add_harmonic_flag = get_bits1(gb)))
get_bits1_vector(gb, sbr->data[0].bs_add_harmonic, sbr->n[1]);
if ((sbr->data[1].bs_add_harmonic_flag = get_bits1(gb)))
get_bits1_vector(gb, sbr->data[1].bs_add_harmonic, sbr->n[1]);
}
| 4,753 |
FFmpeg | de64d8cf171c6ecdca22d57f0bdd7efec95d0c0e | 1 | static void qtrle_decode_16bpp(QtrleContext *s, int stream_ptr, int row_ptr, int lines_to_change)
{
int rle_code;
int pixel_ptr;
int row_inc = s->frame.linesize[0];
unsigned short rgb16;
unsigned char *rgb = s->frame.data[0];
int pixel_limit = s->frame.linesize[0] * s->avctx->height;
while (lines_to_change--) {
CHECK_STREAM_PTR(2);
pixel_ptr = row_ptr + (s->buf[stream_ptr++] - 1) * 2;
while ((rle_code = (signed char)s->buf[stream_ptr++]) != -1) {
if (rle_code == 0) {
/* there's another skip code in the stream */
CHECK_STREAM_PTR(1);
pixel_ptr += (s->buf[stream_ptr++] - 1) * 2;
CHECK_PIXEL_PTR(0); /* make sure pixel_ptr is positive */
} else if (rle_code < 0) {
/* decode the run length code */
rle_code = -rle_code;
CHECK_STREAM_PTR(2);
rgb16 = AV_RB16(&s->buf[stream_ptr]);
stream_ptr += 2;
CHECK_PIXEL_PTR(rle_code * 2);
while (rle_code--) {
*(unsigned short *)(&rgb[pixel_ptr]) = rgb16;
pixel_ptr += 2;
}
} else {
CHECK_STREAM_PTR(rle_code * 2);
CHECK_PIXEL_PTR(rle_code * 2);
/* copy pixels directly to output */
while (rle_code--) {
rgb16 = AV_RB16(&s->buf[stream_ptr]);
stream_ptr += 2;
*(unsigned short *)(&rgb[pixel_ptr]) = rgb16;
pixel_ptr += 2;
}
}
}
row_ptr += row_inc;
}
}
| 4,754 |
qemu | c572f23a3e7180dbeab5e86583e43ea2afed6271 | 1 | static void v9fs_mknod(void *opaque)
{
int mode;
gid_t gid;
int32_t fid;
V9fsQID qid;
int err = 0;
int major, minor;
size_t offset = 7;
V9fsString name;
struct stat stbuf;
V9fsFidState *fidp;
V9fsPDU *pdu = opaque;
V9fsState *s = pdu->s;
pdu_unmarshal(pdu, offset, "dsdddd", &fid, &name, &mode,
&major, &minor, &gid);
fidp = get_fid(pdu, fid);
if (fidp == NULL) {
err = -ENOENT;
goto out_nofid;
}
err = v9fs_co_mknod(pdu, fidp, &name, fidp->uid, gid,
makedev(major, minor), mode, &stbuf);
if (err < 0) {
goto out;
}
stat_to_qid(&stbuf, &qid);
err = offset;
err += pdu_marshal(pdu, offset, "Q", &qid);
out:
put_fid(pdu, fidp);
out_nofid:
trace_v9fs_mknod_return(pdu->tag, pdu->id, qid.type, qid.version, qid.path);
complete_pdu(s, pdu, err);
v9fs_string_free(&name);
} | 4,755 |
qemu | fb0e8e79a9d77ee240dbca036fa8698ce654e5d1 | 1 | void HELPER(cpsr_write_eret)(CPUARMState *env, uint32_t val)
{
cpsr_write(env, val, CPSR_ERET_MASK, CPSRWriteExceptionReturn);
arm_call_el_change_hook(arm_env_get_cpu(env));
} | 4,756 |
FFmpeg | 7b5ff7d57355dc608f0fd86e3ab32a2fda65e752 | 1 | static void vp7_decode_mb_row_no_filter(AVCodecContext *avctx, void *tdata,
int jobnr, int threadnr)
{
decode_mb_row_no_filter(avctx, tdata, jobnr, threadnr, 1);
}
| 4,758 |
qemu | daa76aa416b1e18ab1fac650ff53d966d8f21f68 | 1 | static void test_parse_path_subprocess(void)
{
/* All these should work without issue */
qemu_set_log_filename("/tmp/qemu.log");
qemu_set_log_filename("/tmp/qemu-%d.log");
qemu_set_log_filename("/tmp/qemu.log.%d");
}
| 4,759 |
FFmpeg | 4bff9ef9d0781c4de228bf1f85634d2706fc589b | 0 | static inline void RENAME(hScale)(int16_t *dst, int dstW, uint8_t *src, int srcW, int xInc,
int16_t *filter, int16_t *filterPos, long filterSize)
{
#ifdef HAVE_MMX
assert(filterSize % 4 == 0 && filterSize>0);
if(filterSize==4) // always true for upscaling, sometimes for down too
{
long counter= -2*dstW;
filter-= counter*2;
filterPos-= counter/2;
dst-= counter/2;
asm volatile(
"pxor %%mm7, %%mm7 \n\t"
"movq "MANGLE(w02)", %%mm6 \n\t"
"push %%"REG_BP" \n\t" // we use 7 regs here ...
"mov %%"REG_a", %%"REG_BP" \n\t"
ASMALIGN16
"1: \n\t"
"movzwl (%2, %%"REG_BP"), %%eax \n\t"
"movzwl 2(%2, %%"REG_BP"), %%ebx\n\t"
"movq (%1, %%"REG_BP", 4), %%mm1\n\t"
"movq 8(%1, %%"REG_BP", 4), %%mm3\n\t"
"movd (%3, %%"REG_a"), %%mm0 \n\t"
"movd (%3, %%"REG_b"), %%mm2 \n\t"
"punpcklbw %%mm7, %%mm0 \n\t"
"punpcklbw %%mm7, %%mm2 \n\t"
"pmaddwd %%mm1, %%mm0 \n\t"
"pmaddwd %%mm2, %%mm3 \n\t"
"psrad $8, %%mm0 \n\t"
"psrad $8, %%mm3 \n\t"
"packssdw %%mm3, %%mm0 \n\t"
"pmaddwd %%mm6, %%mm0 \n\t"
"packssdw %%mm0, %%mm0 \n\t"
"movd %%mm0, (%4, %%"REG_BP") \n\t"
"add $4, %%"REG_BP" \n\t"
" jnc 1b \n\t"
"pop %%"REG_BP" \n\t"
: "+a" (counter)
: "c" (filter), "d" (filterPos), "S" (src), "D" (dst)
: "%"REG_b
);
}
else if(filterSize==8)
{
long counter= -2*dstW;
filter-= counter*4;
filterPos-= counter/2;
dst-= counter/2;
asm volatile(
"pxor %%mm7, %%mm7 \n\t"
"movq "MANGLE(w02)", %%mm6 \n\t"
"push %%"REG_BP" \n\t" // we use 7 regs here ...
"mov %%"REG_a", %%"REG_BP" \n\t"
ASMALIGN16
"1: \n\t"
"movzwl (%2, %%"REG_BP"), %%eax \n\t"
"movzwl 2(%2, %%"REG_BP"), %%ebx\n\t"
"movq (%1, %%"REG_BP", 8), %%mm1\n\t"
"movq 16(%1, %%"REG_BP", 8), %%mm3\n\t"
"movd (%3, %%"REG_a"), %%mm0 \n\t"
"movd (%3, %%"REG_b"), %%mm2 \n\t"
"punpcklbw %%mm7, %%mm0 \n\t"
"punpcklbw %%mm7, %%mm2 \n\t"
"pmaddwd %%mm1, %%mm0 \n\t"
"pmaddwd %%mm2, %%mm3 \n\t"
"movq 8(%1, %%"REG_BP", 8), %%mm1\n\t"
"movq 24(%1, %%"REG_BP", 8), %%mm5\n\t"
"movd 4(%3, %%"REG_a"), %%mm4 \n\t"
"movd 4(%3, %%"REG_b"), %%mm2 \n\t"
"punpcklbw %%mm7, %%mm4 \n\t"
"punpcklbw %%mm7, %%mm2 \n\t"
"pmaddwd %%mm1, %%mm4 \n\t"
"pmaddwd %%mm2, %%mm5 \n\t"
"paddd %%mm4, %%mm0 \n\t"
"paddd %%mm5, %%mm3 \n\t"
"psrad $8, %%mm0 \n\t"
"psrad $8, %%mm3 \n\t"
"packssdw %%mm3, %%mm0 \n\t"
"pmaddwd %%mm6, %%mm0 \n\t"
"packssdw %%mm0, %%mm0 \n\t"
"movd %%mm0, (%4, %%"REG_BP") \n\t"
"add $4, %%"REG_BP" \n\t"
" jnc 1b \n\t"
"pop %%"REG_BP" \n\t"
: "+a" (counter)
: "c" (filter), "d" (filterPos), "S" (src), "D" (dst)
: "%"REG_b
);
}
else
{
uint8_t *offset = src+filterSize;
long counter= -2*dstW;
// filter-= counter*filterSize/2;
filterPos-= counter/2;
dst-= counter/2;
asm volatile(
"pxor %%mm7, %%mm7 \n\t"
"movq "MANGLE(w02)", %%mm6 \n\t"
ASMALIGN16
"1: \n\t"
"mov %2, %%"REG_c" \n\t"
"movzwl (%%"REG_c", %0), %%eax \n\t"
"movzwl 2(%%"REG_c", %0), %%ebx \n\t"
"mov %5, %%"REG_c" \n\t"
"pxor %%mm4, %%mm4 \n\t"
"pxor %%mm5, %%mm5 \n\t"
"2: \n\t"
"movq (%1), %%mm1 \n\t"
"movq (%1, %6), %%mm3 \n\t"
"movd (%%"REG_c", %%"REG_a"), %%mm0\n\t"
"movd (%%"REG_c", %%"REG_b"), %%mm2\n\t"
"punpcklbw %%mm7, %%mm0 \n\t"
"punpcklbw %%mm7, %%mm2 \n\t"
"pmaddwd %%mm1, %%mm0 \n\t"
"pmaddwd %%mm2, %%mm3 \n\t"
"paddd %%mm3, %%mm5 \n\t"
"paddd %%mm0, %%mm4 \n\t"
"add $8, %1 \n\t"
"add $4, %%"REG_c" \n\t"
"cmp %4, %%"REG_c" \n\t"
" jb 2b \n\t"
"add %6, %1 \n\t"
"psrad $8, %%mm4 \n\t"
"psrad $8, %%mm5 \n\t"
"packssdw %%mm5, %%mm4 \n\t"
"pmaddwd %%mm6, %%mm4 \n\t"
"packssdw %%mm4, %%mm4 \n\t"
"mov %3, %%"REG_a" \n\t"
"movd %%mm4, (%%"REG_a", %0) \n\t"
"add $4, %0 \n\t"
" jnc 1b \n\t"
: "+r" (counter), "+r" (filter)
: "m" (filterPos), "m" (dst), "m"(offset),
"m" (src), "r" (filterSize*2)
: "%"REG_b, "%"REG_a, "%"REG_c
);
}
#else
#ifdef HAVE_ALTIVEC
hScale_altivec_real(dst, dstW, src, srcW, xInc, filter, filterPos, filterSize);
#else
int i;
for(i=0; i<dstW; i++)
{
int j;
int srcPos= filterPos[i];
int val=0;
// printf("filterPos: %d\n", filterPos[i]);
for(j=0; j<filterSize; j++)
{
// printf("filter: %d, src: %d\n", filter[i], src[srcPos + j]);
val += ((int)src[srcPos + j])*filter[filterSize*i + j];
}
// filter += hFilterSize;
dst[i] = FFMIN(FFMAX(0, val>>7), (1<<15)-1); // the cubic equation does overflow ...
// dst[i] = val>>7;
}
#endif
#endif
}
| 4,763 |
FFmpeg | 90901860c21468d6e9ae437c2bacb099c7bd3acf | 0 | int audio_resample(ReSampleContext *s, short *output, short *input, int nb_samples)
{
int i, nb_samples1;
short *bufin[2];
short *bufout[2];
short *buftmp2[2], *buftmp3[2];
int lenout;
if (s->input_channels == s->output_channels && s->ratio == 1.0 && 0) {
/* nothing to do */
memcpy(output, input, nb_samples * s->input_channels * sizeof(short));
return nb_samples;
}
/* XXX: move those malloc to resample init code */
for(i=0; i<s->filter_channels; i++){
bufin[i]= (short*) av_malloc( (nb_samples + s->temp_len) * sizeof(short) );
memcpy(bufin[i], s->temp[i], s->temp_len * sizeof(short));
buftmp2[i] = bufin[i] + s->temp_len;
}
/* make some zoom to avoid round pb */
lenout= (int)(4*nb_samples * s->ratio) + 16;
bufout[0]= (short*) av_malloc( lenout * sizeof(short) );
bufout[1]= (short*) av_malloc( lenout * sizeof(short) );
if (s->input_channels == 2 &&
s->output_channels == 1) {
buftmp3[0] = output;
stereo_to_mono(buftmp2[0], input, nb_samples);
} else if (s->output_channels >= 2 && s->input_channels == 1) {
buftmp3[0] = bufout[0];
memcpy(buftmp2[0], input, nb_samples*sizeof(short));
} else if (s->output_channels >= 2) {
buftmp3[0] = bufout[0];
buftmp3[1] = bufout[1];
stereo_split(buftmp2[0], buftmp2[1], input, nb_samples);
} else {
buftmp3[0] = output;
memcpy(buftmp2[0], input, nb_samples*sizeof(short));
}
nb_samples += s->temp_len;
/* resample each channel */
nb_samples1 = 0; /* avoid warning */
for(i=0;i<s->filter_channels;i++) {
int consumed;
int is_last= i+1 == s->filter_channels;
nb_samples1 = av_resample(s->resample_context, buftmp3[i], bufin[i], &consumed, nb_samples, lenout, is_last);
s->temp_len= nb_samples - consumed;
s->temp[i]= av_realloc(s->temp[i], s->temp_len*sizeof(short));
memcpy(s->temp[i], bufin[i] + consumed, s->temp_len*sizeof(short));
}
if (s->output_channels == 2 && s->input_channels == 1) {
mono_to_stereo(output, buftmp3[0], nb_samples1);
} else if (s->output_channels == 2) {
stereo_mux(output, buftmp3[0], buftmp3[1], nb_samples1);
} else if (s->output_channels == 6) {
ac3_5p1_mux(output, buftmp3[0], buftmp3[1], nb_samples1);
}
for(i=0; i<s->filter_channels; i++)
av_free(bufin[i]);
av_free(bufout[0]);
av_free(bufout[1]);
return nb_samples1;
}
| 4,764 |
FFmpeg | c6939f65a116b1ffed345d29d8621ee4ffb32235 | 0 | static int filter_slice(AVFilterContext *ctx, void *arg, int jobnr,
int nb_jobs)
{
TransContext *s = ctx->priv;
ThreadData *td = arg;
AVFrame *out = td->out;
AVFrame *in = td->in;
int plane;
for (plane = 0; out->data[plane]; plane++) {
int hsub = plane == 1 || plane == 2 ? s->hsub : 0;
int vsub = plane == 1 || plane == 2 ? s->vsub : 0;
int pixstep = s->pixsteps[plane];
int inh = AV_CEIL_RSHIFT(in->height, vsub);
int outw = AV_CEIL_RSHIFT(out->width, hsub);
int outh = AV_CEIL_RSHIFT(out->height, vsub);
int start = (outh * jobnr ) / nb_jobs;
int end = (outh * (jobnr+1)) / nb_jobs;
uint8_t *dst, *src;
int dstlinesize, srclinesize;
int x, y;
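/* dir bit 0 reads the source bottom-up and dir bit 1 writes the destination bottom-up; combined with the 8x8 transpose this gives the four transpose/rotation variants */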
dstlinesize = out->linesize[plane];
dst = out->data[plane] + start * dstlinesize;
src = in->data[plane];
srclinesize = in->linesize[plane];
if (s->dir & 1) {
src += in->linesize[plane] * (inh - 1);
srclinesize *= -1;
}
if (s->dir & 2) {
dst = out->data[plane] + dstlinesize * (outh - start - 1);
dstlinesize *= -1;
}
for (y = start; y < end - 7; y += 8) {
for (x = 0; x < outw - 7; x += 8) {
s->transpose_8x8(src + x * srclinesize + y * pixstep,
srclinesize,
dst + (y - start) * dstlinesize + x * pixstep,
dstlinesize);
}
if (outw - x > 0 && end - y > 0)
s->transpose_block(src + x * srclinesize + y * pixstep,
srclinesize,
dst + (y - start) * dstlinesize + x * pixstep,
dstlinesize, outw - x, end - y);
}
if (end - y > 0)
s->transpose_block(src + 0 * srclinesize + y * pixstep,
srclinesize,
dst + (y - start) * dstlinesize + 0 * pixstep,
dstlinesize, outw, end - y);
}
return 0;
}
| 4,765 |
FFmpeg | 4cd0bdae9a62d1f0366e60603222762af31e5289 | 1 | static int expand_rle_row(const uint8_t *in_buf, const uint8_t* in_end,
unsigned char *out_buf, uint8_t* out_end, int pixelstride)
{
unsigned char pixel, count;
unsigned char *orig = out_buf;
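/* SGI RLE: the low 7 bits of each opcode are the run length (0 ends the row); the high bit selects a literal run (set) or one pixel repeated count times (clear) */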
while (1) {
if(in_buf + 1 > in_end) return -1;
pixel = bytestream_get_byte(&in_buf);
if (!(count = (pixel & 0x7f))) {
return (out_buf - orig) / pixelstride;
}
/* Check for buffer overflow. */
if(out_buf + pixelstride * count >= out_end) return -1;
if (pixel & 0x80) {
while (count--) {
*out_buf = bytestream_get_byte(&in_buf);
out_buf += pixelstride;
}
} else {
pixel = bytestream_get_byte(&in_buf);
while (count--) {
*out_buf = pixel;
out_buf += pixelstride;
}
}
}
}
| 4,771 |
FFmpeg | cdedf7e6254024c643532d45ac7c68e84e50eb01 | 0 | static void encode_exponents_blk_ch(uint8_t *exp,
int nb_exps, int exp_strategy,
uint8_t *num_exp_groups)
{
int group_size, nb_groups, i, j, k, exp_min;
group_size = exp_strategy + (exp_strategy == EXP_D45);
*num_exp_groups = (nb_exps + (group_size * 3) - 4) / (3 * group_size);
nb_groups = *num_exp_groups * 3;
/* for each group, compute the minimum exponent */
if (exp_strategy > EXP_D15) {
k = 1;
for (i = 1; i <= nb_groups; i++) {
exp_min = exp[k];
assert(exp_min >= 0 && exp_min <= 24);
for (j = 1; j < group_size; j++) {
if (exp[k+j] < exp_min)
exp_min = exp[k+j];
}
exp[i] = exp_min;
k += group_size;
}
}
/* constraint for DC exponent */
if (exp[0] > 15)
exp[0] = 15;
/* decrease the delta between each groups to within 2 so that they can be
differentially encoded */
for (i = 1; i <= nb_groups; i++)
exp[i] = FFMIN(exp[i], exp[i-1] + 2);
for (i = nb_groups-1; i >= 0; i--)
exp[i] = FFMIN(exp[i], exp[i+1] + 2);
/* now we have the exponent values the decoder will see */
if (exp_strategy > EXP_D15) {
k = nb_groups * group_size;
for (i = nb_groups; i > 0; i--) {
for (j = 0; j < group_size; j++)
exp[k-j] = exp[i];
k -= group_size;
}
}
}
| 4,772 |
FFmpeg | dc0ad40de2b0d6995eb842e56b22f9096bd539ff | 0 | static void ac3_update_bap_counts_c(uint16_t mant_cnt[16], uint8_t *bap,
int len)
{
while (len-- >= 0)
mant_cnt[bap[len]]++;
}
| 4,773 |
FFmpeg | 3f9fa2d0b58b142b165d4a8eaa61d7e837a76838 | 0 | static av_cold int adpcm_decode_init(AVCodecContext * avctx)
{
ADPCMDecodeContext *c = avctx->priv_data;
unsigned int min_channels = 1;
unsigned int max_channels = 2;
switch(avctx->codec->id) {
case AV_CODEC_ID_ADPCM_DTK:
case AV_CODEC_ID_ADPCM_EA:
min_channels = 2;
break;
case AV_CODEC_ID_ADPCM_AFC:
case AV_CODEC_ID_ADPCM_EA_R1:
case AV_CODEC_ID_ADPCM_EA_R2:
case AV_CODEC_ID_ADPCM_EA_R3:
case AV_CODEC_ID_ADPCM_EA_XAS:
max_channels = 6;
break;
case AV_CODEC_ID_ADPCM_THP:
case AV_CODEC_ID_ADPCM_THP_LE:
max_channels = 10;
break;
}
if (avctx->channels < min_channels || avctx->channels > max_channels) {
av_log(avctx, AV_LOG_ERROR, "Invalid number of channels\n");
return AVERROR(EINVAL);
}
switch(avctx->codec->id) {
case AV_CODEC_ID_ADPCM_CT:
c->status[0].step = c->status[1].step = 511;
break;
case AV_CODEC_ID_ADPCM_IMA_WAV:
if (avctx->bits_per_coded_sample < 2 || avctx->bits_per_coded_sample > 5)
return AVERROR_INVALIDDATA;
break;
case AV_CODEC_ID_ADPCM_IMA_APC:
if (avctx->extradata && avctx->extradata_size >= 8) {
c->status[0].predictor = AV_RL32(avctx->extradata);
c->status[1].predictor = AV_RL32(avctx->extradata + 4);
}
break;
case AV_CODEC_ID_ADPCM_IMA_WS:
if (avctx->extradata && avctx->extradata_size >= 2)
c->vqa_version = AV_RL16(avctx->extradata);
break;
default:
break;
}
switch(avctx->codec->id) {
case AV_CODEC_ID_ADPCM_IMA_QT:
case AV_CODEC_ID_ADPCM_IMA_WAV:
case AV_CODEC_ID_ADPCM_4XM:
case AV_CODEC_ID_ADPCM_XA:
case AV_CODEC_ID_ADPCM_EA_R1:
case AV_CODEC_ID_ADPCM_EA_R2:
case AV_CODEC_ID_ADPCM_EA_R3:
case AV_CODEC_ID_ADPCM_EA_XAS:
case AV_CODEC_ID_ADPCM_THP:
case AV_CODEC_ID_ADPCM_THP_LE:
case AV_CODEC_ID_ADPCM_AFC:
case AV_CODEC_ID_ADPCM_DTK:
avctx->sample_fmt = AV_SAMPLE_FMT_S16P;
break;
case AV_CODEC_ID_ADPCM_IMA_WS:
avctx->sample_fmt = c->vqa_version == 3 ? AV_SAMPLE_FMT_S16P :
AV_SAMPLE_FMT_S16;
break;
default:
avctx->sample_fmt = AV_SAMPLE_FMT_S16;
}
return 0;
}
| 4,774 |
FFmpeg | a8305b0ea3ccfe00a50cd3312bfcc455c78aacb5 | 1 | static void test2_fill_picture(AVFilterContext *ctx, AVFrame *frame)
{
TestSourceContext *s = ctx->priv;
FFDrawColor color;
unsigned alpha = (uint32_t)s->alpha << 24;
/* colored background */
{
unsigned i, x = 0, x2;
x = 0;
for (i = 1; i < 7; i++) {
x2 = av_rescale(i, s->w, 6);
x2 = ff_draw_round_to_sub(&s->draw, 0, 0, x2);
set_color(s, &color, ((i & 1) ? 0xFF0000 : 0) |
((i & 2) ? 0x00FF00 : 0) |
((i & 4) ? 0x0000FF : 0) |
alpha);
ff_fill_rectangle(&s->draw, &color, frame->data, frame->linesize,
x, 0, x2 - x, frame->height);
x = x2;
}
}
/* oblique gradient */
/* note: too slow if using blending */
if (s->h >= 64) {
unsigned x, dx, y0, y, g0, g;
dx = ff_draw_round_to_sub(&s->draw, 0, +1, 1);
y0 = av_rescale_q(s->pts, s->time_base, av_make_q(2, s->h - 16));
g0 = av_rescale_q(s->pts, s->time_base, av_make_q(1, 128));
for (x = 0; x < s->w; x += dx) {
g = (av_rescale(x, 6 * 256, s->w) + g0) % (6 * 256);
set_color(s, &color, color_gradient(g) | alpha);
y = y0 + av_rescale(x, s->h / 2, s->w);
y %= 2 * (s->h - 16);
if (y > s->h - 16)
y = 2 * (s->h - 16) - y;
y = ff_draw_round_to_sub(&s->draw, 1, 0, y);
ff_fill_rectangle(&s->draw, &color, frame->data, frame->linesize,
x, y, dx, 16);
}
}
/* top right: draw clock hands */
if (s->w >= 64 && s->h >= 64) {
int l = (FFMIN(s->w, s->h) - 32) >> 1;
int steps = FFMAX(4, l >> 5);
int xc = (s->w >> 2) + (s->w >> 1);
int yc = (s->h >> 2);
int cycle = l << 2;
int pos, xh, yh;
int c, i;
for (c = 0; c < 3; c++) {
set_color(s, &color, (0xBBBBBB ^ (0xFF << (c << 3))) | alpha);
pos = av_rescale_q(s->pts, s->time_base, av_make_q(64 >> (c << 1), cycle)) % cycle;
xh = pos < 1 * l ? pos :
pos < 2 * l ? l :
pos < 3 * l ? 3 * l - pos : 0;
yh = pos < 1 * l ? 0 :
pos < 2 * l ? pos - l :
pos < 3 * l ? l :
cycle - pos;
xh -= l >> 1;
yh -= l >> 1;
for (i = 1; i <= steps; i++) {
int x = av_rescale(xh, i, steps) + xc;
int y = av_rescale(yh, i, steps) + yc;
x = ff_draw_round_to_sub(&s->draw, 0, -1, x);
y = ff_draw_round_to_sub(&s->draw, 1, -1, y);
ff_fill_rectangle(&s->draw, &color, frame->data, frame->linesize,
x, y, 8, 8);
}
}
}
/* bottom left: beating rectangles */
if (s->w >= 64 && s->h >= 64) {
int l = (FFMIN(s->w, s->h) - 16) >> 2;
int cycle = l << 3;
int xc = (s->w >> 2);
int yc = (s->h >> 2) + (s->h >> 1);
int xm1 = ff_draw_round_to_sub(&s->draw, 0, -1, xc - 8);
int xm2 = ff_draw_round_to_sub(&s->draw, 0, +1, xc + 8);
int ym1 = ff_draw_round_to_sub(&s->draw, 1, -1, yc - 8);
int ym2 = ff_draw_round_to_sub(&s->draw, 1, +1, yc + 8);
int size, step, x1, x2, y1, y2;
size = av_rescale_q(s->pts, s->time_base, av_make_q(4, cycle));
step = size / l;
size %= l;
if (step & 1)
size = l - size;
step = (step >> 1) & 3;
set_color(s, &color, 0xFF808080);
x1 = ff_draw_round_to_sub(&s->draw, 0, -1, xc - 4 - size);
x2 = ff_draw_round_to_sub(&s->draw, 0, +1, xc + 4 + size);
y1 = ff_draw_round_to_sub(&s->draw, 1, -1, yc - 4 - size);
y2 = ff_draw_round_to_sub(&s->draw, 1, +1, yc + 4 + size);
if (step == 0 || step == 2)
ff_fill_rectangle(&s->draw, &color, frame->data, frame->linesize,
x1, ym1, x2 - x1, ym2 - ym1);
if (step == 1 || step == 2)
ff_fill_rectangle(&s->draw, &color, frame->data, frame->linesize,
xm1, y1, xm2 - xm1, y2 - y1);
if (step == 3)
ff_fill_rectangle(&s->draw, &color, frame->data, frame->linesize,
x1, y1, x2 - x1, y2 - y1);
}
/* bottom right: checker with random noise */
{
unsigned xmin = av_rescale(5, s->w, 8);
unsigned xmax = av_rescale(7, s->w, 8);
unsigned ymin = av_rescale(5, s->h, 8);
unsigned ymax = av_rescale(7, s->h, 8);
unsigned x, y, i, r;
uint8_t alpha[256];
r = s->pts;
for (y = ymin; y < ymax - 15; y += 16) {
for (x = xmin; x < xmax - 15; x += 16) {
if ((x ^ y) & 16)
continue;
for (i = 0; i < 256; i++) {
r = r * 1664525 + 1013904223;
alpha[i] = r >> 24;
}
set_color(s, &color, 0xFF00FF80);
ff_blend_mask(&s->draw, &color, frame->data, frame->linesize,
frame->width, frame->height,
alpha, 16, 16, 16, 3, 0, x, y);
}
}
}
/* bouncing square */
if (s->w >= 16 && s->h >= 16) {
unsigned w = s->w - 8;
unsigned h = s->h - 8;
unsigned x = av_rescale_q(s->pts, s->time_base, av_make_q(233, 55 * w)) % (w << 1);
unsigned y = av_rescale_q(s->pts, s->time_base, av_make_q(233, 89 * h)) % (h << 1);
if (x > w)
x = (w << 1) - x;
if (y > h)
y = (h << 1) - y;
x = ff_draw_round_to_sub(&s->draw, 0, -1, x);
y = ff_draw_round_to_sub(&s->draw, 1, -1, y);
set_color(s, &color, 0xFF8000FF);
ff_fill_rectangle(&s->draw, &color, frame->data, frame->linesize,
x, y, 8, 8);
}
/* top right: draw frame time and frame number */
{
char buf[256];
unsigned time;
time = av_rescale_q(s->pts, s->time_base, av_make_q(1, 1000)) % 86400000;
set_color(s, &color, 0xC0000000);
ff_blend_rectangle(&s->draw, &color, frame->data, frame->linesize,
frame->width, frame->height,
2, 2, 100, 36);
set_color(s, &color, 0xFFFF8000);
snprintf(buf, sizeof(buf), "%02d:%02d:%02d.%03d\n%12"PRIi64,
time / 3600000, (time / 60000) % 60, (time / 1000) % 60,
time % 1000, s->pts);
draw_text(s, frame, &color, 4, 4, buf);
}
}
| 4,775 |
FFmpeg | 16c831851384ab59e73579fdd9913fbff3c0284a | 1 | static int iv_decode_frame(AVCodecContext *avctx,
const uint8_t *buf, int buf_size)
{
Indeo3DecodeContext *s = avctx->priv_data;
unsigned int image_width, image_height,
chroma_width, chroma_height;
unsigned long flags, cb_offset, data_size,
y_offset, v_offset, u_offset, mc_vector_count;
const uint8_t *hdr_pos, *buf_pos;
buf_pos = buf;
buf_pos += 18; /* skip OS header (16 bytes) and version number */
flags = bytestream_get_le16(&buf_pos);
data_size = bytestream_get_le32(&buf_pos);
cb_offset = *buf_pos++;
buf_pos += 3; /* skip reserved byte and checksum */
image_height = bytestream_get_le16(&buf_pos);
image_width = bytestream_get_le16(&buf_pos);
if(avcodec_check_dimensions(avctx, image_width, image_height))
return -1;
chroma_height = ((image_height >> 2) + 3) & 0x7ffc;
chroma_width = ((image_width >> 2) + 3) & 0x7ffc;
y_offset = bytestream_get_le32(&buf_pos);
v_offset = bytestream_get_le32(&buf_pos);
u_offset = bytestream_get_le32(&buf_pos);
buf_pos += 4; /* reserved */
hdr_pos = buf_pos;
if(data_size == 0x80) return 4;
if(FFMAX3(y_offset, v_offset, u_offset) >= buf_size-16) {
av_log(s->avctx, AV_LOG_ERROR, "y/u/v offset outside buffer\n");
return -1;
}
if(flags & 0x200) {
s->cur_frame = s->iv_frame + 1;
s->ref_frame = s->iv_frame;
} else {
s->cur_frame = s->iv_frame;
s->ref_frame = s->iv_frame + 1;
}
buf_pos = buf + 16 + y_offset;
mc_vector_count = bytestream_get_le32(&buf_pos);
if(2LL*mc_vector_count >= buf_size-16-y_offset) {
av_log(s->avctx, AV_LOG_ERROR, "mc_vector_count too large\n");
return -1;
}
iv_Decode_Chunk(s, s->cur_frame->Ybuf, s->ref_frame->Ybuf, image_width,
image_height, buf_pos + mc_vector_count * 2, cb_offset, hdr_pos, buf_pos,
FFMIN(image_width, 160));
if (!(s->avctx->flags & CODEC_FLAG_GRAY))
{
buf_pos = buf + 16 + v_offset;
mc_vector_count = bytestream_get_le32(&buf_pos);
if(2LL*mc_vector_count >= buf_size-16-v_offset) {
av_log(s->avctx, AV_LOG_ERROR, "mc_vector_count too large\n");
return -1;
}
iv_Decode_Chunk(s, s->cur_frame->Vbuf, s->ref_frame->Vbuf, chroma_width,
chroma_height, buf_pos + mc_vector_count * 2, cb_offset, hdr_pos, buf_pos,
FFMIN(chroma_width, 40));
buf_pos = buf + 16 + u_offset;
mc_vector_count = bytestream_get_le32(&buf_pos);
if(2LL*mc_vector_count >= buf_size-16-u_offset) {
av_log(s->avctx, AV_LOG_ERROR, "mc_vector_count too large\n");
return -1;
}
iv_Decode_Chunk(s, s->cur_frame->Ubuf, s->ref_frame->Ubuf, chroma_width,
chroma_height, buf_pos + mc_vector_count * 2, cb_offset, hdr_pos, buf_pos,
FFMIN(chroma_width, 40));
}
return 8;
}
| 4,776 |
FFmpeg | 11f24e71ff2b598d973fd24bcf950eebaea9b3e6 | 1 | static void output_packet(OutputFile *of, AVPacket *pkt, OutputStream *ost)
{
int ret = 0;
/* apply the output bitstream filters, if any */
if (ost->nb_bitstream_filters) {
int idx;
ret = av_bsf_send_packet(ost->bsf_ctx[0], pkt);
if (ret < 0)
goto finish;
idx = 1;
while (idx) {
/* get a packet from the previous filter up the chain */
ret = av_bsf_receive_packet(ost->bsf_ctx[idx - 1], pkt);
/* HACK! - aac_adtstoasc updates extradata after filtering the first frame when
* the api states this shouldn't happen after init(). Propagate it here to the
* muxer and to the next filters in the chain to workaround this.
* TODO/FIXME - Make aac_adtstoasc use new packet side data instead of changing
* par_out->extradata and adapt muxers accordingly to get rid of this. */
if (!(ost->bsf_extradata_updated[idx - 1] & 1)) {
ret = avcodec_parameters_copy(ost->st->codecpar, ost->bsf_ctx[idx - 1]->par_out);
if (ret < 0)
goto finish;
ost->bsf_extradata_updated[idx - 1] |= 1;
}
if (ret == AVERROR(EAGAIN)) {
ret = 0;
idx--;
continue;
} else if (ret < 0)
goto finish;
/* send it to the next filter down the chain or to the muxer */
if (idx < ost->nb_bitstream_filters) {
/* HACK/FIXME! - See above */
if (!(ost->bsf_extradata_updated[idx] & 2)) {
ret = avcodec_parameters_copy(ost->bsf_ctx[idx]->par_out, ost->bsf_ctx[idx - 1]->par_out);
if (ret < 0)
goto finish;
ost->bsf_extradata_updated[idx] |= 2;
}
ret = av_bsf_send_packet(ost->bsf_ctx[idx], pkt);
if (ret < 0)
goto finish;
idx++;
} else
write_packet(of, pkt, ost);
}
} else
write_packet(of, pkt, ost);
finish:
if (ret < 0 && ret != AVERROR_EOF) {
av_log(NULL, AV_LOG_ERROR, "Error applying bitstream filters to an output "
"packet for stream #%d:%d.\n", ost->file_index, ost->index);
if(exit_on_error)
exit_program(1);
}
} | 4,777 |
qemu | e7d81004e486b0e80a674d164d8aec0e83fa812f | 1 | static void audio_pp_nb_voices (const char *typ, int nb)
{
switch (nb) {
case 0:
printf ("Does not support %s\n", typ);
break;
case 1:
printf ("One %s voice\n", typ);
break;
case INT_MAX:
printf ("Theoretically supports many %s voices\n", typ);
break;
default:
printf ("Theoretically supports upto %d %s voices\n", nb, typ);
break;
}
}
| 4,778 |
FFmpeg | 604c9b1196c70d79bbbc1f23e75f6a8253a74da3 | 1 | int ff_rtsp_fetch_packet(AVFormatContext *s, AVPacket *pkt)
{
RTSPState *rt = s->priv_data;
int ret, len;
RTSPStream *rtsp_st, *first_queue_st = NULL;
int64_t wait_end = 0;
if (rt->nb_byes == rt->nb_rtsp_streams)
return AVERROR_EOF;
/* get next frames from the same RTP packet */
if (rt->cur_transport_priv) {
if (rt->transport == RTSP_TRANSPORT_RDT) {
ret = ff_rdt_parse_packet(rt->cur_transport_priv, pkt, NULL, 0);
} else if (rt->transport == RTSP_TRANSPORT_RTP) {
ret = ff_rtp_parse_packet(rt->cur_transport_priv, pkt, NULL, 0);
} else if (rt->ts && CONFIG_RTPDEC) {
ret = ff_mpegts_parse_packet(rt->ts, pkt, rt->recvbuf + rt->recvbuf_pos, rt->recvbuf_len - rt->recvbuf_pos);
if (ret >= 0) {
rt->recvbuf_pos += ret;
ret = rt->recvbuf_pos < rt->recvbuf_len;
}
} else
ret = -1;
if (ret == 0) {
rt->cur_transport_priv = NULL;
return 0;
} else if (ret == 1) {
return 0;
} else
rt->cur_transport_priv = NULL;
}
redo:
if (rt->transport == RTSP_TRANSPORT_RTP) {
int i;
int64_t first_queue_time = 0;
for (i = 0; i < rt->nb_rtsp_streams; i++) {
RTPDemuxContext *rtpctx = rt->rtsp_streams[i]->transport_priv;
int64_t queue_time;
if (!rtpctx)
continue;
queue_time = ff_rtp_queued_packet_time(rtpctx);
if (queue_time && (queue_time - first_queue_time < 0 ||
!first_queue_time)) {
first_queue_time = queue_time;
first_queue_st = rt->rtsp_streams[i];
}
}
if (first_queue_time) {
wait_end = first_queue_time + s->max_delay;
} else {
wait_end = 0;
first_queue_st = NULL;
}
}
/* read next RTP packet */
if (!rt->recvbuf) {
rt->recvbuf = av_malloc(RECVBUF_SIZE);
if (!rt->recvbuf)
return AVERROR(ENOMEM);
}
switch(rt->lower_transport) {
default:
#if CONFIG_RTSP_DEMUXER
case RTSP_LOWER_TRANSPORT_TCP:
len = ff_rtsp_tcp_read_packet(s, &rtsp_st, rt->recvbuf, RECVBUF_SIZE);
break;
#endif
case RTSP_LOWER_TRANSPORT_UDP:
case RTSP_LOWER_TRANSPORT_UDP_MULTICAST:
len = udp_read_packet(s, &rtsp_st, rt->recvbuf, RECVBUF_SIZE, wait_end);
if (len > 0 && rtsp_st->transport_priv && rt->transport == RTSP_TRANSPORT_RTP)
ff_rtp_check_and_send_back_rr(rtsp_st->transport_priv, rtsp_st->rtp_handle, NULL, len);
break;
case RTSP_LOWER_TRANSPORT_CUSTOM:
if (first_queue_st && rt->transport == RTSP_TRANSPORT_RTP &&
wait_end && wait_end < av_gettime_relative())
len = AVERROR(EAGAIN);
else
len = ffio_read_partial(s->pb, rt->recvbuf, RECVBUF_SIZE);
len = pick_stream(s, &rtsp_st, rt->recvbuf, len);
if (len > 0 && rtsp_st->transport_priv && rt->transport == RTSP_TRANSPORT_RTP)
ff_rtp_check_and_send_back_rr(rtsp_st->transport_priv, NULL, s->pb, len);
break;
}
if (len == AVERROR(EAGAIN) && first_queue_st &&
rt->transport == RTSP_TRANSPORT_RTP) {
rtsp_st = first_queue_st;
ret = ff_rtp_parse_packet(rtsp_st->transport_priv, pkt, NULL, 0);
goto end;
}
if (len < 0)
return len;
if (len == 0)
return AVERROR_EOF;
if (rt->transport == RTSP_TRANSPORT_RDT) {
ret = ff_rdt_parse_packet(rtsp_st->transport_priv, pkt, &rt->recvbuf, len);
} else if (rt->transport == RTSP_TRANSPORT_RTP) {
ret = ff_rtp_parse_packet(rtsp_st->transport_priv, pkt, &rt->recvbuf, len);
if (rtsp_st->feedback) {
AVIOContext *pb = NULL;
if (rt->lower_transport == RTSP_LOWER_TRANSPORT_CUSTOM)
pb = s->pb;
ff_rtp_send_rtcp_feedback(rtsp_st->transport_priv, rtsp_st->rtp_handle, pb);
}
if (ret < 0) {
/* Either bad packet, or a RTCP packet. Check if the
* first_rtcp_ntp_time field was initialized. */
RTPDemuxContext *rtpctx = rtsp_st->transport_priv;
if (rtpctx->first_rtcp_ntp_time != AV_NOPTS_VALUE) {
/* first_rtcp_ntp_time has been initialized for this stream,
* copy the same value to all other uninitialized streams,
* in order to map their timestamp origin to the same ntp time
* as this one. */
int i;
AVStream *st = NULL;
if (rtsp_st->stream_index >= 0)
st = s->streams[rtsp_st->stream_index];
for (i = 0; i < rt->nb_rtsp_streams; i++) {
RTPDemuxContext *rtpctx2 = rt->rtsp_streams[i]->transport_priv;
AVStream *st2 = NULL;
if (rt->rtsp_streams[i]->stream_index >= 0)
st2 = s->streams[rt->rtsp_streams[i]->stream_index];
if (rtpctx2 && st && st2 &&
rtpctx2->first_rtcp_ntp_time == AV_NOPTS_VALUE) {
rtpctx2->first_rtcp_ntp_time = rtpctx->first_rtcp_ntp_time;
rtpctx2->rtcp_ts_offset = av_rescale_q(
rtpctx->rtcp_ts_offset, st->time_base,
st2->time_base);
}
}
}
if (ret == -RTCP_BYE) {
rt->nb_byes++;
av_log(s, AV_LOG_DEBUG, "Received BYE for stream %d (%d/%d)\n",
rtsp_st->stream_index, rt->nb_byes, rt->nb_rtsp_streams);
if (rt->nb_byes == rt->nb_rtsp_streams)
return AVERROR_EOF;
}
}
} else if (rt->ts && CONFIG_RTPDEC) {
ret = ff_mpegts_parse_packet(rt->ts, pkt, rt->recvbuf, len);
if (ret >= 0) {
if (ret < len) {
rt->recvbuf_len = len;
rt->recvbuf_pos = ret;
rt->cur_transport_priv = rt->ts;
return 1;
} else {
ret = 0;
}
}
} else {
return AVERROR_INVALIDDATA;
}
end:
if (ret < 0)
goto redo;
if (ret == 1)
/* more packets may follow, so we save the RTP context */
rt->cur_transport_priv = rtsp_st->transport_priv;
return ret;
}
| 4,779 |
qemu | 82d07945652f16078b172d2bd46659e8f5f30d8e | 1 | static void assign_failed_examine(AssignedDevice *dev)
{
char name[PATH_MAX], dir[PATH_MAX], driver[PATH_MAX] = {}, *ns;
uint16_t vendor_id, device_id;
int r;
snprintf(dir, sizeof(dir), "/sys/bus/pci/devices/%04x:%02x:%02x.%01x/",
dev->host.domain, dev->host.bus, dev->host.slot,
dev->host.function);
snprintf(name, sizeof(name), "%sdriver", dir);
r = readlink(name, driver, sizeof(driver));
if ((r <= 0) || r >= sizeof(driver)) {
goto fail;
}
ns = strrchr(driver, '/');
if (!ns) {
goto fail;
}
ns++;
if (get_real_vendor_id(dir, &vendor_id) ||
get_real_device_id(dir, &device_id)) {
goto fail;
}
error_printf("*** The driver '%s' is occupying your device "
"%04x:%02x:%02x.%x.\n"
"***\n"
"*** You can try the following commands to free it:\n"
"***\n"
"*** $ echo \"%04x %04x\" > /sys/bus/pci/drivers/pci-stub/new_id\n"
"*** $ echo \"%04x:%02x:%02x.%x\" > /sys/bus/pci/drivers/%s/unbind\n"
"*** $ echo \"%04x:%02x:%02x.%x\" > /sys/bus/pci/drivers/"
"pci-stub/bind\n"
"*** $ echo \"%04x %04x\" > /sys/bus/pci/drivers/pci-stub/remove_id\n"
"***",
ns, dev->host.domain, dev->host.bus, dev->host.slot,
dev->host.function, vendor_id, device_id,
dev->host.domain, dev->host.bus, dev->host.slot, dev->host.function,
ns, dev->host.domain, dev->host.bus, dev->host.slot,
dev->host.function, vendor_id, device_id);
return;
fail:
error_report("Couldn't find out why.");
} | 4,780 |
FFmpeg | 2ed0f76655a76cc49f8a1a1d59e545f5906e7924 | 1 | av_cold void ff_h264_free_context(H264Context *h)
{
int i;
free_tables(h); //FIXME cleanup init stuff perhaps
for(i = 0; i < MAX_SPS_COUNT; i++)
av_freep(h->sps_buffers + i);
for(i = 0; i < MAX_PPS_COUNT; i++)
av_freep(h->pps_buffers + i);
}
| 4,782 |
qemu | 94e7340b5db8bce7866e44e700ffa8fd26585c7e | 1 | static int nbd_co_receive_request(NBDRequest *req, struct nbd_request *request)
{
NBDClient *client = req->client;
int csock = client->sock;
int rc;
client->recv_coroutine = qemu_coroutine_self();
if (nbd_receive_request(csock, request) == -1) {
rc = -EIO;
goto out;
}
if (request->len > NBD_BUFFER_SIZE) {
LOG("len (%u) is larger than max len (%u)",
request->len, NBD_BUFFER_SIZE);
rc = -EINVAL;
goto out;
}
if ((request->from + request->len) < request->from) {
LOG("integer overflow detected! "
"you're probably being attacked");
rc = -EINVAL;
goto out;
}
TRACE("Decoding type");
if ((request->type & NBD_CMD_MASK_COMMAND) == NBD_CMD_WRITE) {
TRACE("Reading %u byte(s)", request->len);
if (qemu_co_recv(csock, req->data, request->len) != request->len) {
LOG("reading from socket failed");
rc = -EIO;
goto out;
}
}
rc = 0;
out:
client->recv_coroutine = NULL;
return rc;
}
| 4,784 |
FFmpeg | 136f55207521f0b03194ef5b55ba70f1635d6aee | 1 | static inline int hpel_motion(MpegEncContext *s,
uint8_t *dest, uint8_t *src,
int src_x, int src_y,
op_pixels_func *pix_op,
int motion_x, int motion_y)
{
int dxy = 0;
int emu = 0;
src_x += motion_x >> 1;
src_y += motion_y >> 1;
/* WARNING: do no forget half pels */
src_x = av_clip(src_x, -16, s->width); // FIXME unneeded for emu?
if (src_x != s->width)
dxy |= motion_x & 1;
src_y = av_clip(src_y, -16, s->height);
if (src_y != s->height)
dxy |= (motion_y & 1) << 1;
src += src_y * s->linesize + src_x;
if (s->unrestricted_mv) {
if ((unsigned)src_x > FFMAX(s->h_edge_pos - (motion_x & 1) - 8, 0) ||
(unsigned)src_y > FFMAX(s->v_edge_pos - (motion_y & 1) - 8, 0)) {
s->vdsp.emulated_edge_mc(s->sc.edge_emu_buffer, src,
s->linesize, s->linesize,
9, 9,
src_x, src_y, s->h_edge_pos,
s->v_edge_pos);
src = s->sc.edge_emu_buffer;
emu = 1;
}
}
pix_op[dxy](dest, src, s->linesize, 8);
return emu;
}
| 4,785 |
FFmpeg | ca16618b01abfde44b4eaf92dc89b01aa1b4a91e | 0 | static void inline xan_wc3_copy_pixel_run(XanContext *s,
int x, int y, int pixel_count, int motion_x, int motion_y)
{
int stride;
int line_inc;
int curframe_index, prevframe_index;
int curframe_x, prevframe_x;
int width = s->avctx->width;
unsigned char *palette_plane, *prev_palette_plane;
unsigned char *y_plane, *u_plane, *v_plane;
unsigned char *prev_y_plane, *prev_u_plane, *prev_v_plane;
unsigned char *rgb_plane, *prev_rgb_plane;
unsigned short *rgb16_plane, *prev_rgb16_plane;
unsigned int *rgb32_plane, *prev_rgb32_plane;
switch (s->avctx->pix_fmt) {
case PIX_FMT_PAL8:
palette_plane = s->current_frame.data[0];
prev_palette_plane = s->last_frame.data[0];
stride = s->current_frame.linesize[0];
line_inc = stride - width;
curframe_index = y * stride + x;
curframe_x = x;
prevframe_index = (y + motion_y) * stride + x + motion_x;
prevframe_x = x + motion_x;
while(pixel_count--) {
palette_plane[curframe_index++] =
prev_palette_plane[prevframe_index++];
ADVANCE_CURFRAME_X();
ADVANCE_PREVFRAME_X();
}
break;
case PIX_FMT_RGB555:
case PIX_FMT_RGB565:
rgb16_plane = (unsigned short *)s->current_frame.data[0];
prev_rgb16_plane = (unsigned short *)s->last_frame.data[0];
stride = s->current_frame.linesize[0] / 2;
line_inc = stride - width;
curframe_index = y * stride + x;
curframe_x = x;
prevframe_index = (y + motion_y) * stride + x + motion_x;
prevframe_x = x + motion_x;
while(pixel_count--) {
rgb16_plane[curframe_index++] =
prev_rgb16_plane[prevframe_index++];
ADVANCE_CURFRAME_X();
ADVANCE_PREVFRAME_X();
}
break;
case PIX_FMT_RGB24:
case PIX_FMT_BGR24:
rgb_plane = s->current_frame.data[0];
prev_rgb_plane = s->last_frame.data[0];
stride = s->current_frame.linesize[0];
line_inc = stride - width * 3;
curframe_index = y * stride + x * 3;
curframe_x = x;
prevframe_index = (y + motion_y) * stride +
(3 * (x + motion_x));
prevframe_x = x + motion_x;
while(pixel_count--) {
rgb_plane[curframe_index++] = prev_rgb_plane[prevframe_index++];
rgb_plane[curframe_index++] = prev_rgb_plane[prevframe_index++];
rgb_plane[curframe_index++] = prev_rgb_plane[prevframe_index++];
ADVANCE_CURFRAME_X();
ADVANCE_PREVFRAME_X();
}
break;
case PIX_FMT_RGBA32:
rgb32_plane = (unsigned int *)s->current_frame.data[0];
prev_rgb32_plane = (unsigned int *)s->last_frame.data[0];
stride = s->current_frame.linesize[0] / 4;
line_inc = stride - width;
curframe_index = y * stride + x;
curframe_x = x;
prevframe_index = (y + motion_y) * stride + x + motion_x;
prevframe_x = x + motion_x;
while(pixel_count--) {
rgb32_plane[curframe_index++] =
prev_rgb32_plane[prevframe_index++];
ADVANCE_CURFRAME_X();
ADVANCE_PREVFRAME_X();
}
break;
case PIX_FMT_YUV444P:
y_plane = s->current_frame.data[0];
u_plane = s->current_frame.data[1];
v_plane = s->current_frame.data[2];
prev_y_plane = s->last_frame.data[0];
prev_u_plane = s->last_frame.data[1];
prev_v_plane = s->last_frame.data[2];
stride = s->current_frame.linesize[0];
line_inc = stride - width;
curframe_index = y * stride + x;
curframe_x = x;
prevframe_index = (y + motion_y) * stride + x + motion_x;
prevframe_x = x + motion_x;
while(pixel_count--) {
y_plane[curframe_index] = prev_y_plane[prevframe_index];
u_plane[curframe_index] = prev_u_plane[prevframe_index];
v_plane[curframe_index] = prev_v_plane[prevframe_index];
curframe_index++;
ADVANCE_CURFRAME_X();
prevframe_index++;
ADVANCE_PREVFRAME_X();
}
break;
default:
av_log(s->avctx, AV_LOG_ERROR, " Xan WC3: Unhandled colorspace\n");
break;
}
}
| 4,786 |
FFmpeg | e2b54464c6a9de5d6b9ad4307696b0215d5e05a4 | 0 | static int set_format(void *obj, const char *name, int fmt, int search_flags,
enum AVOptionType type, const char *desc, int nb_fmts)
{
void *target_obj;
const AVOption *o = av_opt_find2(obj, name, NULL, 0,
search_flags, &target_obj);
int min, max;
const AVClass *class = *(AVClass **)obj;
if (!o || !target_obj)
return AVERROR_OPTION_NOT_FOUND;
if (o->type != type) {
av_log(obj, AV_LOG_ERROR,
"The value set by option '%s' is not a %s format", name, desc);
return AVERROR(EINVAL);
}
#if LIBAVUTIL_VERSION_MAJOR < 54
if (class->version && class->version < AV_VERSION_INT(52, 11, 100)) {
min = -1;
max = nb_fmts-1;
} else
#endif
{
min = FFMIN(o->min, -1);
max = FFMAX(o->max, nb_fmts-1);
}
if (fmt < min || fmt > max) {
av_log(obj, AV_LOG_ERROR,
"Value %d for parameter '%s' out of %s format range [%d - %d]\n",
fmt, name, desc, min, max);
return AVERROR(ERANGE);
}
*(int *)(((uint8_t *)target_obj) + o->offset) = fmt;
return 0;
}
| 4,787 |
FFmpeg | 6796a1dd8c14843b77925cb83a3ef88706ae1dd0 | 0 | void ff_put_h264_qpel4_mc01_msa(uint8_t *dst, const uint8_t *src,
ptrdiff_t stride)
{
avc_luma_vt_qrt_4w_msa(src - (stride * 2), stride, dst, stride, 4, 0);
}
| 4,788 |
FFmpeg | 702200358197a0ea5ea82d1d6540c785bb04fae4 | 0 | static void dump_cook_context(COOKContext *q, COOKextradata *e)
{
//int i=0;
#define PRINT(a,b) av_log(NULL,AV_LOG_ERROR," %s = %d\n", a, b);
av_log(NULL,AV_LOG_ERROR,"COOKextradata\n");
av_log(NULL,AV_LOG_ERROR,"cookversion=%x\n",e->cookversion);
if (e->cookversion > MONO_COOK2) {
PRINT("js_subband_start",e->js_subband_start);
PRINT("js_vlc_bits",e->js_vlc_bits);
}
av_log(NULL,AV_LOG_ERROR,"COOKContext\n");
PRINT("nb_channels",q->nb_channels);
PRINT("bit_rate",q->bit_rate);
PRINT("sample_rate",q->sample_rate);
PRINT("samples_per_channel",q->samples_per_channel);
PRINT("samples_per_frame",q->samples_per_frame);
PRINT("subbands",q->subbands);
PRINT("random_state",q->random_state);
PRINT("mlt_size",q->mlt_size);
PRINT("js_subband_start",q->js_subband_start);
PRINT("numvector_bits",q->numvector_bits);
PRINT("numvector_size",q->numvector_size);
PRINT("total_subbands",q->total_subbands);
PRINT("frame_reorder_counter",q->frame_reorder_counter);
PRINT("frame_reorder_index_size",q->frame_reorder_index_size);
}
| 4,789 |
FFmpeg | e33d3720239314d28a48c64c1071ba9c048280d1 | 0 | static void ffm_set_write_index(AVFormatContext *s, int64_t pos,
int64_t file_size)
{
FFMContext *ffm = s->priv_data;
ffm->write_index = pos;
ffm->file_size = file_size;
}
| 4,790 |
qemu | cd7bc87868d534f95e928cad98e2a52df7695771 | 1 | static void usb_msd_class_initfn_bot(ObjectClass *klass, void *data)
{
USBDeviceClass *uc = USB_DEVICE_CLASS(klass);
uc->realize = usb_msd_realize_bot;
uc->attached_settable = true;
} | 4,791 |
qemu | 8d04fb55dec381bc5105cb47f29d918e579e8cbd | 1 | uint64_t HELPER(get_cp_reg64)(CPUARMState *env, void *rip)
{
const ARMCPRegInfo *ri = rip;
return ri->readfn(env, ri);
}
| 4,792 |
qemu | f3b2bea3c76ba9283b957f1373e7cebdbf863059 | 1 | static USBDevice *usb_try_create_simple(USBBus *bus, const char *name,
Error **errp)
{
Error *err = NULL;
USBDevice *dev;
dev = USB_DEVICE(qdev_try_create(&bus->qbus, name));
if (!dev) {
error_setg(errp, "Failed to create USB device '%s'", name);
return NULL;
}
object_property_set_bool(OBJECT(dev), true, "realized", &err);
if (err) {
error_propagate(errp, err);
error_prepend(errp, "Failed to initialize USB device '%s': ",
name);
object_unparent(OBJECT(dev));
return NULL;
}
return dev;
}
| 4,793 |
FFmpeg | f1d8763a02b5fce9a7d9789e049d74a45b15e1e8 | 1 | static int decode_chunks(AVCodecContext *avctx,
AVFrame *picture, int *got_output,
const uint8_t *buf, int buf_size)
{
Mpeg1Context *s = avctx->priv_data;
MpegEncContext *s2 = &s->mpeg_enc_ctx;
const uint8_t *buf_ptr = buf;
const uint8_t *buf_end = buf + buf_size;
int ret, input_size;
int last_code = 0;
for (;;) {
/* find next start code */
uint32_t start_code = -1;
buf_ptr = avpriv_mpv_find_start_code(buf_ptr, buf_end, &start_code);
if (start_code > 0x1ff) {
if (s2->pict_type != AV_PICTURE_TYPE_B || avctx->skip_frame <= AVDISCARD_DEFAULT) {
if (HAVE_THREADS && (avctx->active_thread_type & FF_THREAD_SLICE)) {
int i;
avctx->execute(avctx, slice_decode_thread, &s2->thread_context[0], NULL, s->slice_count, sizeof(void*));
for (i = 0; i < s->slice_count; i++)
s2->error_count += s2->thread_context[i]->error_count;
}
if (CONFIG_MPEG_VDPAU_DECODER && avctx->codec->capabilities & CODEC_CAP_HWACCEL_VDPAU)
ff_vdpau_mpeg_picture_complete(s2, buf, buf_size, s->slice_count);
if (slice_end(avctx, picture)) {
if (s2->last_picture_ptr || s2->low_delay) //FIXME merge with the stuff in mpeg_decode_slice
*got_output = 1;
}
}
s2->pict_type = 0;
return FFMAX(0, buf_ptr - buf - s2->parse_context.last_index);
}
input_size = buf_end - buf_ptr;
if (avctx->debug & FF_DEBUG_STARTCODE) {
av_log(avctx, AV_LOG_DEBUG, "%3X at %td left %d\n", start_code, buf_ptr-buf, input_size);
}
/* prepare data for next start code */
switch (start_code) {
case SEQ_START_CODE:
if (last_code == 0) {
mpeg1_decode_sequence(avctx, buf_ptr, input_size);
s->sync=1;
} else {
av_log(avctx, AV_LOG_ERROR, "ignoring SEQ_START_CODE after %X\n", last_code);
if (avctx->err_recognition & AV_EF_EXPLODE)
return AVERROR_INVALIDDATA;
}
break;
case PICTURE_START_CODE:
if (HAVE_THREADS && (avctx->active_thread_type & FF_THREAD_SLICE) && s->slice_count) {
int i;
avctx->execute(avctx, slice_decode_thread,
s2->thread_context, NULL,
s->slice_count, sizeof(void*));
for (i = 0; i < s->slice_count; i++)
s2->error_count += s2->thread_context[i]->error_count;
s->slice_count = 0;
}
if (last_code == 0 || last_code == SLICE_MIN_START_CODE) {
ret = mpeg_decode_postinit(avctx);
if (ret < 0) {
av_log(avctx, AV_LOG_ERROR, "mpeg_decode_postinit() failure\n");
return ret;
}
/* we have a complete image: we try to decompress it */
if (mpeg1_decode_picture(avctx, buf_ptr, input_size) < 0)
s2->pict_type = 0;
s2->first_slice = 1;
last_code = PICTURE_START_CODE;
} else {
av_log(avctx, AV_LOG_ERROR, "ignoring pic after %X\n", last_code);
if (avctx->err_recognition & AV_EF_EXPLODE)
return AVERROR_INVALIDDATA;
}
break;
case EXT_START_CODE:
init_get_bits(&s2->gb, buf_ptr, input_size*8);
switch (get_bits(&s2->gb, 4)) {
case 0x1:
if (last_code == 0) {
mpeg_decode_sequence_extension(s);
} else {
av_log(avctx, AV_LOG_ERROR, "ignoring seq ext after %X\n", last_code);
if (avctx->err_recognition & AV_EF_EXPLODE)
return AVERROR_INVALIDDATA;
}
break;
case 0x2:
mpeg_decode_sequence_display_extension(s);
break;
case 0x3:
mpeg_decode_quant_matrix_extension(s2);
break;
case 0x7:
mpeg_decode_picture_display_extension(s);
break;
case 0x8:
if (last_code == PICTURE_START_CODE) {
mpeg_decode_picture_coding_extension(s);
} else {
av_log(avctx, AV_LOG_ERROR, "ignoring pic cod ext after %X\n", last_code);
if (avctx->err_recognition & AV_EF_EXPLODE)
return AVERROR_INVALIDDATA;
}
break;
}
break;
case USER_START_CODE:
mpeg_decode_user_data(avctx, buf_ptr, input_size);
break;
case GOP_START_CODE:
if (last_code == 0) {
s2->first_field=0;
mpeg_decode_gop(avctx, buf_ptr, input_size);
s->sync=1;
} else {
av_log(avctx, AV_LOG_ERROR, "ignoring GOP_START_CODE after %X\n", last_code);
if (avctx->err_recognition & AV_EF_EXPLODE)
return AVERROR_INVALIDDATA;
}
break;
default:
if (start_code >= SLICE_MIN_START_CODE &&
start_code <= SLICE_MAX_START_CODE && last_code != 0) {
const int field_pic = s2->picture_structure != PICT_FRAME;
int mb_y = (start_code - SLICE_MIN_START_CODE) << field_pic;
last_code = SLICE_MIN_START_CODE;
if (s2->picture_structure == PICT_BOTTOM_FIELD)
mb_y++;
if (mb_y >= s2->mb_height) {
av_log(s2->avctx, AV_LOG_ERROR, "slice below image (%d >= %d)\n", mb_y, s2->mb_height);
return -1;
}
if (s2->last_picture_ptr == NULL) {
/* Skip B-frames if we do not have reference frames and gop is not closed */
if (s2->pict_type == AV_PICTURE_TYPE_B) {
if (!s->closed_gop)
break;
}
}
if (s2->pict_type == AV_PICTURE_TYPE_I)
s->sync=1;
if (s2->next_picture_ptr == NULL) {
/* Skip P-frames if we do not have a reference frame or we have an invalid header. */
if (s2->pict_type == AV_PICTURE_TYPE_P && !s->sync) break;
}
if ((avctx->skip_frame >= AVDISCARD_NONREF && s2->pict_type == AV_PICTURE_TYPE_B) ||
(avctx->skip_frame >= AVDISCARD_NONKEY && s2->pict_type != AV_PICTURE_TYPE_I) ||
avctx->skip_frame >= AVDISCARD_ALL)
break;
if (!s->mpeg_enc_ctx_allocated)
break;
if (s2->codec_id == AV_CODEC_ID_MPEG2VIDEO) {
if (mb_y < avctx->skip_top || mb_y >= s2->mb_height - avctx->skip_bottom)
break;
}
if (!s2->pict_type) {
av_log(avctx, AV_LOG_ERROR, "Missing picture start code\n");
if (avctx->err_recognition & AV_EF_EXPLODE)
return AVERROR_INVALIDDATA;
break;
}
if (s2->first_slice) {
s2->first_slice = 0;
if (mpeg_field_start(s2, buf, buf_size) < 0)
return -1;
}
if (!s2->current_picture_ptr) {
av_log(avctx, AV_LOG_ERROR, "current_picture not initialized\n");
return AVERROR_INVALIDDATA;
}
if (avctx->codec->capabilities & CODEC_CAP_HWACCEL_VDPAU) {
s->slice_count++;
break;
}
if (HAVE_THREADS && (avctx->active_thread_type & FF_THREAD_SLICE)) {
int threshold = (s2->mb_height * s->slice_count +
s2->slice_context_count / 2) /
s2->slice_context_count;
if (threshold <= mb_y) {
MpegEncContext *thread_context = s2->thread_context[s->slice_count];
thread_context->start_mb_y = mb_y;
thread_context->end_mb_y = s2->mb_height;
if (s->slice_count) {
s2->thread_context[s->slice_count-1]->end_mb_y = mb_y;
ff_update_duplicate_context(thread_context, s2);
}
init_get_bits(&thread_context->gb, buf_ptr, input_size*8);
s->slice_count++;
}
buf_ptr += 2; // FIXME add minimum number of bytes per slice
} else {
ret = mpeg_decode_slice(s2, mb_y, &buf_ptr, input_size);
emms_c();
if (ret < 0) {
if (avctx->err_recognition & AV_EF_EXPLODE)
return ret;
if (s2->resync_mb_x >= 0 && s2->resync_mb_y >= 0)
ff_er_add_slice(s2, s2->resync_mb_x, s2->resync_mb_y, s2->mb_x, s2->mb_y, ER_AC_ERROR | ER_DC_ERROR | ER_MV_ERROR);
} else {
ff_er_add_slice(s2, s2->resync_mb_x, s2->resync_mb_y, s2->mb_x-1, s2->mb_y, ER_AC_END | ER_DC_END | ER_MV_END);
}
}
}
break;
}
}
}
| 4,795 |
FFmpeg | 6ca82975b7a8eaf676a52738ec8e7e36732327cc | 1 | static inline int mdec_decode_block_intra(MDECContext *a, int16_t *block, int n)
{
int level, diff, i, j, run;
int component;
RLTable *rl = &ff_rl_mpeg1;
uint8_t * const scantable = a->scantable.permutated;
const uint16_t *quant_matrix = ff_mpeg1_default_intra_matrix;
const int qscale = a->qscale;
/* DC coefficient */
if (a->version == 2) {
block[0] = 2 * get_sbits(&a->gb, 10) + 1024;
} else {
component = (n <= 3 ? 0 : n - 4 + 1);
diff = decode_dc(&a->gb, component);
if (diff >= 0xffff)
return AVERROR_INVALIDDATA;
a->last_dc[component] += diff;
block[0] = a->last_dc[component] << 3;
}
i = 0;
{
OPEN_READER(re, &a->gb);
/* now quantify & encode AC coefficients */
for (;;) {
UPDATE_CACHE(re, &a->gb);
GET_RL_VLC(level, run, re, &a->gb, rl->rl_vlc[0], TEX_VLC_BITS, 2, 0);
if (level == 127) {
break;
} else if (level != 0) {
i += run;
if (i > 63) {
av_log(a->avctx, AV_LOG_ERROR,
"ac-tex damaged at %d %d\n", a->mb_x, a->mb_y);
return AVERROR_INVALIDDATA;
}
j = scantable[i];
level = (level * qscale * quant_matrix[j]) >> 3;
level = (level ^ SHOW_SBITS(re, &a->gb, 1)) - SHOW_SBITS(re, &a->gb, 1);
LAST_SKIP_BITS(re, &a->gb, 1);
} else {
/* escape */
run = SHOW_UBITS(re, &a->gb, 6)+1; LAST_SKIP_BITS(re, &a->gb, 6);
UPDATE_CACHE(re, &a->gb);
level = SHOW_SBITS(re, &a->gb, 10); SKIP_BITS(re, &a->gb, 10);
i += run;
if (i > 63) {
av_log(a->avctx, AV_LOG_ERROR,
"ac-tex damaged at %d %d\n", a->mb_x, a->mb_y);
return AVERROR_INVALIDDATA;
}
j = scantable[i];
if (level < 0) {
level = -level;
level = (level * qscale * quant_matrix[j]) >> 3;
level = (level - 1) | 1;
level = -level;
} else {
level = (level * qscale * quant_matrix[j]) >> 3;
level = (level - 1) | 1;
}
}
block[j] = level;
}
CLOSE_READER(re, &a->gb);
}
a->block_last_index[n] = i;
return 0;
}
| 4,796 |
qemu | 7c9e527659c67d4d7b41d9504f93d2d7ee482488 | 1 | static int raw_open_common(BlockDriverState *bs, QDict *options,
int bdrv_flags, int open_flags, Error **errp)
{
BDRVRawState *s = bs->opaque;
QemuOpts *opts;
Error *local_err = NULL;
const char *filename = NULL;
const char *str;
BlockdevAioOptions aio, aio_default;
int fd, ret;
struct stat st;
OnOffAuto locking;
opts = qemu_opts_create(&raw_runtime_opts, NULL, 0, &error_abort);
qemu_opts_absorb_qdict(opts, options, &local_err);
filename = qemu_opt_get(opts, "filename");
ret = raw_normalize_devicepath(&filename);
if (ret != 0) {
error_setg_errno(errp, -ret, "Could not normalize device path");
aio_default = (bdrv_flags & BDRV_O_NATIVE_AIO)
? BLOCKDEV_AIO_OPTIONS_NATIVE
: BLOCKDEV_AIO_OPTIONS_THREADS;
aio = qapi_enum_parse(&BlockdevAioOptions_lookup,
qemu_opt_get(opts, "aio"),
aio_default, &local_err);
s->use_linux_aio = (aio == BLOCKDEV_AIO_OPTIONS_NATIVE);
locking = qapi_enum_parse(&OnOffAuto_lookup,
qemu_opt_get(opts, "locking"),
ON_OFF_AUTO_AUTO, &local_err);
switch (locking) {
case ON_OFF_AUTO_ON:
s->use_lock = true;
if (!qemu_has_ofd_lock()) {
fprintf(stderr,
"File lock requested but OFD locking syscall is "
"unavailable, falling back to POSIX file locks.\n"
"Due to the implementation, locks can be lost "
"unexpectedly.\n");
break;
case ON_OFF_AUTO_OFF:
s->use_lock = false;
break;
case ON_OFF_AUTO_AUTO:
s->use_lock = qemu_has_ofd_lock();
break;
default:
abort();
s->open_flags = open_flags;
raw_parse_flags(bdrv_flags, &s->open_flags);
s->fd = -1;
fd = qemu_open(filename, s->open_flags, 0644);
if (fd < 0) {
ret = -errno;
error_setg_errno(errp, errno, "Could not open '%s'", filename);
if (ret == -EROFS) {
ret = -EACCES;
s->fd = fd;
s->lock_fd = -1;
if (s->use_lock) {
fd = qemu_open(filename, s->open_flags);
if (fd < 0) {
ret = -errno;
error_setg_errno(errp, errno, "Could not open '%s' for locking",
filename);
qemu_close(s->fd);
s->lock_fd = fd;
s->perm = 0;
s->shared_perm = BLK_PERM_ALL;
#ifdef CONFIG_LINUX_AIO
/* Currently Linux does AIO only for files opened with O_DIRECT */
if (s->use_linux_aio && !(s->open_flags & O_DIRECT)) {
error_setg(errp, "aio=native was specified, but it requires "
"cache.direct=on, which was not specified.");
#else
if (s->use_linux_aio) {
error_setg(errp, "aio=native was specified, but is not supported "
"in this build.");
#endif /* !defined(CONFIG_LINUX_AIO) */
s->has_discard = true;
s->has_write_zeroes = true;
bs->supported_zero_flags = BDRV_REQ_MAY_UNMAP;
if ((bs->open_flags & BDRV_O_NOCACHE) != 0) {
s->needs_alignment = true;
if (fstat(s->fd, &st) < 0) {
ret = -errno;
error_setg_errno(errp, errno, "Could not stat file");
if (S_ISREG(st.st_mode)) {
s->discard_zeroes = true;
s->has_fallocate = true;
if (S_ISBLK(st.st_mode)) {
#ifdef BLKDISCARDZEROES
unsigned int arg;
if (ioctl(s->fd, BLKDISCARDZEROES, &arg) == 0 && arg) {
s->discard_zeroes = true;
#endif
#ifdef __linux__
/* On Linux 3.10, BLKDISCARD leaves stale data in the page cache. Do
* not rely on the contents of discarded blocks unless using O_DIRECT.
* Same for BLKZEROOUT.
*/
if (!(bs->open_flags & BDRV_O_NOCACHE)) {
s->discard_zeroes = false;
s->has_write_zeroes = false;
#endif
#ifdef __FreeBSD__
if (S_ISCHR(st.st_mode)) {
/*
* The file is a char device (disk), which on FreeBSD isn't behind
* a pager, so force all requests to be aligned. This is needed
* so QEMU makes sure all IO operations on the device are aligned
* to sector size, or else FreeBSD will reject them with EINVAL.
*/
s->needs_alignment = true;
#endif
#ifdef CONFIG_XFS
if (platform_test_xfs_fd(s->fd)) {
s->is_xfs = true;
#endif
ret = 0;
fail:
if (filename && (bdrv_flags & BDRV_O_TEMPORARY)) {
unlink(filename);
qemu_opts_del(opts);
return ret; | 4,797 |
FFmpeg | ac4b32df71bd932838043a4838b86d11e169707f | 1 | av_cold int ff_vp8_decode_init(AVCodecContext *avctx)
{
VP8Context *s = avctx->priv_data;
int ret;
s->avctx = avctx;
avctx->pix_fmt = AV_PIX_FMT_YUV420P;
avctx->internal->allocate_progress = 1;
ff_videodsp_init(&s->vdsp, 8);
ff_h264_pred_init(&s->hpc, AV_CODEC_ID_VP8, 8, 1);
ff_vp8dsp_init(&s->vp8dsp);
if ((ret = vp8_init_frames(s)) < 0) {
ff_vp8_decode_free(avctx);
return ret;
}
return 0;
}
| 4,799 |
FFmpeg | 2f3b028c7117e03267ea7f88d0d612e70f1afc06 | 1 | static inline void json_print_item_str(WriterContext *wctx,
const char *key, const char *value,
const char *indent)
{
char *key_esc = json_escape_str(key);
char *value_esc = json_escape_str(value);
printf("%s\"%s\": \"%s\"", indent,
key_esc ? key_esc : "",
value_esc ? value_esc : "");
av_free(key_esc);
av_free(value_esc);
}
| 4,802 |
qemu | 56f289f383a871e871f944c7226920b35794efe6 | 1 | static void handle_keydown(SDL_Event *ev)
{
int mod_state, win;
struct sdl2_console *scon = get_scon_from_window(ev->key.windowID);
if (alt_grab) {
mod_state = (SDL_GetModState() & (gui_grab_code | KMOD_LSHIFT)) ==
(gui_grab_code | KMOD_LSHIFT);
} else if (ctrl_grab) {
mod_state = (SDL_GetModState() & KMOD_RCTRL) == KMOD_RCTRL;
} else {
mod_state = (SDL_GetModState() & gui_grab_code) == gui_grab_code;
gui_key_modifier_pressed = mod_state;
if (gui_key_modifier_pressed) {
switch (ev->key.keysym.scancode) {
case SDL_SCANCODE_2:
case SDL_SCANCODE_3:
case SDL_SCANCODE_4:
case SDL_SCANCODE_5:
case SDL_SCANCODE_6:
case SDL_SCANCODE_7:
case SDL_SCANCODE_8:
case SDL_SCANCODE_9:
win = ev->key.keysym.scancode - SDL_SCANCODE_1;
if (win < sdl2_num_outputs) {
sdl2_console[win].hidden = !sdl2_console[win].hidden;
if (sdl2_console[win].real_window) {
if (sdl2_console[win].hidden) {
SDL_HideWindow(sdl2_console[win].real_window);
} else {
SDL_ShowWindow(sdl2_console[win].real_window);
gui_keysym = 1;
break;
case SDL_SCANCODE_F:
toggle_full_screen(scon);
gui_keysym = 1;
break;
case SDL_SCANCODE_U:
sdl2_window_destroy(scon);
sdl2_window_create(scon);
if (!scon->opengl) {
/* re-create scon->texture */
sdl2_2d_switch(&scon->dcl, scon->surface);
gui_keysym = 1;
break;
#if 0
case SDL_SCANCODE_KP_PLUS:
case SDL_SCANCODE_KP_MINUS:
if (!gui_fullscreen) {
int scr_w, scr_h;
int width, height;
SDL_GetWindowSize(scon->real_window, &scr_w, &scr_h);
width = MAX(scr_w + (ev->key.keysym.scancode ==
SDL_SCANCODE_KP_PLUS ? 50 : -50),
160);
height = (surface_height(scon->surface) * width) /
surface_width(scon->surface);
fprintf(stderr, "%s: scale to %dx%d\n",
__func__, width, height);
sdl_scale(scon, width, height);
sdl2_redraw(scon);
gui_keysym = 1;
#endif
default:
break;
if (!gui_keysym) {
sdl2_process_key(scon, &ev->key); | 4,803 |
FFmpeg | 5e6439a12508f8f7f30aeef64eb96c2311b7f573 | 1 | AVRational ff_choose_timebase(AVFormatContext *s, AVStream *st, int min_precission)
{
AVRational q;
int j;
if (st->codec->codec_type == AVMEDIA_TYPE_AUDIO) {
q = (AVRational){1, st->codec->sample_rate};
} else {
q = st->codec->time_base;
}
for (j=2; j<2000; j+= 1+(j>2))
while (q.den / q.num < min_precission && q.num % j == 0)
q.num /= j;
while (q.den / q.num < min_precission && q.den < (1<<24))
q.den <<= 1;
return q;
}
| 4,804 |
FFmpeg | 3426d575bf46edc0f52d15f7e7c1d199e8688faa | 1 | static int pcm_decode_frame(AVCodecContext *avctx,
void *data, int *data_size,
uint8_t *buf, int buf_size)
{
PCMDecode *s = avctx->priv_data;
int c, n;
short *samples;
uint8_t *src, *src2[MAX_CHANNELS];
samples = data;
src = buf;
n= av_get_bits_per_sample(avctx->codec_id)/8;
if((n && buf_size % n) || avctx->channels > MAX_CHANNELS){
av_log(avctx, AV_LOG_ERROR, "invalid PCM packet\n");
return -1;
}
buf_size= FFMIN(buf_size, *data_size/2);
*data_size=0;
n = buf_size/avctx->channels;
for(c=0;c<avctx->channels;c++)
src2[c] = &src[c*n];
switch(avctx->codec->id) {
case CODEC_ID_PCM_S32LE:
decode_to16(4, 1, 0, &src, &samples, buf_size);
break;
case CODEC_ID_PCM_S32BE:
decode_to16(4, 0, 0, &src, &samples, buf_size);
break;
case CODEC_ID_PCM_U32LE:
decode_to16(4, 1, 1, &src, &samples, buf_size);
break;
case CODEC_ID_PCM_U32BE:
decode_to16(4, 0, 1, &src, &samples, buf_size);
break;
case CODEC_ID_PCM_S24LE:
decode_to16(3, 1, 0, &src, &samples, buf_size);
break;
case CODEC_ID_PCM_S24BE:
decode_to16(3, 0, 0, &src, &samples, buf_size);
break;
case CODEC_ID_PCM_U24LE:
decode_to16(3, 1, 1, &src, &samples, buf_size);
break;
case CODEC_ID_PCM_U24BE:
decode_to16(3, 0, 1, &src, &samples, buf_size);
break;
case CODEC_ID_PCM_S24DAUD:
n = buf_size / 3;
for(;n>0;n--) {
uint32_t v = bytestream_get_be24(&src);
v >>= 4; // sync flags are here
*samples++ = ff_reverse[(v >> 8) & 0xff] +
(ff_reverse[v & 0xff] << 8);
}
break;
case CODEC_ID_PCM_S16LE:
n = buf_size >> 1;
for(;n>0;n--) {
*samples++ = bytestream_get_le16(&src);
}
break;
case CODEC_ID_PCM_S16LE_PLANAR:
for(n>>=1;n>0;n--)
for(c=0;c<avctx->channels;c++)
*samples++ = bytestream_get_le16(&src2[c]);
src = src2[avctx->channels-1];
break;
case CODEC_ID_PCM_S16BE:
n = buf_size >> 1;
for(;n>0;n--) {
*samples++ = bytestream_get_be16(&src);
}
break;
case CODEC_ID_PCM_U16LE:
n = buf_size >> 1;
for(;n>0;n--) {
*samples++ = bytestream_get_le16(&src) - 0x8000;
}
break;
case CODEC_ID_PCM_U16BE:
n = buf_size >> 1;
for(;n>0;n--) {
*samples++ = bytestream_get_be16(&src) - 0x8000;
}
break;
case CODEC_ID_PCM_S8:
n = buf_size;
for(;n>0;n--) {
*samples++ = *src++ << 8;
}
break;
case CODEC_ID_PCM_U8:
n = buf_size;
for(;n>0;n--) {
*samples++ = ((int)*src++ - 128) << 8;
}
break;
case CODEC_ID_PCM_ZORK:
n = buf_size;
for(;n>0;n--) {
int x= *src++;
if(x&128) x-= 128;
else x = -x;
*samples++ = x << 8;
}
break;
case CODEC_ID_PCM_ALAW:
case CODEC_ID_PCM_MULAW:
n = buf_size;
for(;n>0;n--) {
*samples++ = s->table[*src++];
}
break;
default:
return -1;
}
*data_size = (uint8_t *)samples - (uint8_t *)data;
return src - buf;
}
| 4,805 |
FFmpeg | 7f05c5cea04112471d8147487aa3b44141922d09 | 1 | static int h264_slice_init(H264Context *h, H264SliceContext *sl,
const H2645NAL *nal)
{
int i, j, ret = 0;
if (h->picture_idr && nal->type != H264_NAL_IDR_SLICE) {
av_log(h->avctx, AV_LOG_ERROR, "Invalid mix of IDR and non-IDR slices\n");
return AVERROR_INVALIDDATA;
}
av_assert1(h->mb_num == h->mb_width * h->mb_height);
if (sl->first_mb_addr << FIELD_OR_MBAFF_PICTURE(h) >= h->mb_num ||
sl->first_mb_addr >= h->mb_num) {
av_log(h->avctx, AV_LOG_ERROR, "first_mb_in_slice overflow\n");
return AVERROR_INVALIDDATA;
}
sl->resync_mb_x = sl->mb_x = sl->first_mb_addr % h->mb_width;
sl->resync_mb_y = sl->mb_y = (sl->first_mb_addr / h->mb_width) <<
FIELD_OR_MBAFF_PICTURE(h);
if (h->picture_structure == PICT_BOTTOM_FIELD)
sl->resync_mb_y = sl->mb_y = sl->mb_y + 1;
av_assert1(sl->mb_y < h->mb_height);
ret = ff_h264_build_ref_list(h, sl);
if (ret < 0)
return ret;
if (h->ps.pps->weighted_bipred_idc == 2 &&
sl->slice_type_nos == AV_PICTURE_TYPE_B) {
implicit_weight_table(h, sl, -1);
if (FRAME_MBAFF(h)) {
implicit_weight_table(h, sl, 0);
implicit_weight_table(h, sl, 1);
}
}
if (sl->slice_type_nos == AV_PICTURE_TYPE_B && !sl->direct_spatial_mv_pred)
ff_h264_direct_dist_scale_factor(h, sl);
ff_h264_direct_ref_list_init(h, sl);
if (h->avctx->skip_loop_filter >= AVDISCARD_ALL ||
(h->avctx->skip_loop_filter >= AVDISCARD_NONKEY &&
h->nal_unit_type != H264_NAL_IDR_SLICE) ||
(h->avctx->skip_loop_filter >= AVDISCARD_NONINTRA &&
sl->slice_type_nos != AV_PICTURE_TYPE_I) ||
(h->avctx->skip_loop_filter >= AVDISCARD_BIDIR &&
sl->slice_type_nos == AV_PICTURE_TYPE_B) ||
(h->avctx->skip_loop_filter >= AVDISCARD_NONREF &&
nal->ref_idc == 0))
sl->deblocking_filter = 0;
if (sl->deblocking_filter == 1 && h->nb_slice_ctx > 1) {
if (h->avctx->flags2 & AV_CODEC_FLAG2_FAST) {
/* Cheat slightly for speed:
* Do not bother to deblock across slices. */
sl->deblocking_filter = 2;
} else {
h->postpone_filter = 1;
}
}
sl->qp_thresh = 15 -
FFMIN(sl->slice_alpha_c0_offset, sl->slice_beta_offset) -
FFMAX3(0,
h->ps.pps->chroma_qp_index_offset[0],
h->ps.pps->chroma_qp_index_offset[1]) +
6 * (h->ps.sps->bit_depth_luma - 8);
sl->slice_num = ++h->current_slice;
if (sl->slice_num)
h->slice_row[(sl->slice_num-1)&(MAX_SLICES-1)]= sl->resync_mb_y;
if ( h->slice_row[sl->slice_num&(MAX_SLICES-1)] + 3 >= sl->resync_mb_y
&& h->slice_row[sl->slice_num&(MAX_SLICES-1)] <= sl->resync_mb_y
&& sl->slice_num >= MAX_SLICES) {
//in case of ASO this check needs to be updated depending on how we decide to assign slice numbers in this case
av_log(h->avctx, AV_LOG_WARNING, "Possibly too many slices (%d >= %d), increase MAX_SLICES and recompile if there are artifacts\n", sl->slice_num, MAX_SLICES);
}
for (j = 0; j < 2; j++) {
int id_list[16];
int *ref2frm = h->ref2frm[sl->slice_num & (MAX_SLICES - 1)][j];
for (i = 0; i < 16; i++) {
id_list[i] = 60;
if (j < sl->list_count && i < sl->ref_count[j] &&
sl->ref_list[j][i].parent->f->buf[0]) {
int k;
AVBuffer *buf = sl->ref_list[j][i].parent->f->buf[0]->buffer;
for (k = 0; k < h->short_ref_count; k++)
if (h->short_ref[k]->f->buf[0]->buffer == buf) {
id_list[i] = k;
break;
}
for (k = 0; k < h->long_ref_count; k++)
if (h->long_ref[k] && h->long_ref[k]->f->buf[0]->buffer == buf) {
id_list[i] = h->short_ref_count + k;
break;
}
}
}
ref2frm[0] =
ref2frm[1] = -1;
for (i = 0; i < 16; i++)
ref2frm[i + 2] = 4 * id_list[i] + (sl->ref_list[j][i].reference & 3);
ref2frm[18 + 0] =
ref2frm[18 + 1] = -1;
for (i = 16; i < 48; i++)
ref2frm[i + 4] = 4 * id_list[(i - 16) >> 1] +
(sl->ref_list[j][i].reference & 3);
}
if (h->avctx->debug & FF_DEBUG_PICT_INFO) {
av_log(h->avctx, AV_LOG_DEBUG,
"slice:%d %s mb:%d %c%s%s frame:%d poc:%d/%d ref:%d/%d qp:%d loop:%d:%d:%d weight:%d%s %s\n",
sl->slice_num,
(h->picture_structure == PICT_FRAME ? "F" : h->picture_structure == PICT_TOP_FIELD ? "T" : "B"),
sl->mb_y * h->mb_width + sl->mb_x,
av_get_picture_type_char(sl->slice_type),
sl->slice_type_fixed ? " fix" : "",
nal->type == H264_NAL_IDR_SLICE ? " IDR" : "",
h->poc.frame_num,
h->cur_pic_ptr->field_poc[0],
h->cur_pic_ptr->field_poc[1],
sl->ref_count[0], sl->ref_count[1],
sl->qscale,
sl->deblocking_filter,
sl->slice_alpha_c0_offset, sl->slice_beta_offset,
sl->pwt.use_weight,
sl->pwt.use_weight == 1 && sl->pwt.use_weight_chroma ? "c" : "",
sl->slice_type == AV_PICTURE_TYPE_B ? (sl->direct_spatial_mv_pred ? "SPAT" : "TEMP") : "");
}
return 0;
}
| 4,806 |