
Searched refs:frame (Results 451 - 475 of 1514) sorted by relevance


/third_party/ffmpeg/libavcodec/
mscc.c
132 static int decode_frame(AVCodecContext *avctx, AVFrame *frame, in decode_frame() argument
146 if ((ret = ff_get_buffer(avctx, frame, 0)) < 0) in decode_frame()
154 frame->palette_has_changed = 1; in decode_frame()
161 memcpy(frame->data[1], s->pal, AVPALETTE_SIZE); in decode_frame()
200 memcpy(frame->data[0] + (avctx->height - j - 1) * frame->linesize[0], in decode_frame()
204 frame->key_frame = 1; in decode_frame()
205 frame->pict_type = AV_PICTURE_TYPE_I; in decode_frame()
mvcdec.c
228 static int mvc_decode_frame(AVCodecContext *avctx, AVFrame *frame, in mvc_decode_frame() argument
235 if ((ret = ff_get_buffer(avctx, frame, 0)) < 0) in mvc_decode_frame()
240 ret = decode_mvc1(avctx, &gb, frame->data[0], in mvc_decode_frame()
241 avctx->width, avctx->height, frame->linesize[0]); in mvc_decode_frame()
243 ret = decode_mvc2(avctx, &gb, frame->data[0], in mvc_decode_frame()
244 avctx->width, avctx->height, frame->linesize[0], in mvc_decode_frame()
249 frame->pict_type = AV_PICTURE_TYPE_I; in mvc_decode_frame()
250 frame->key_frame = 1; in mvc_decode_frame()
v4l2_m2m_enc.c
198 v4l2_set_ext_ctrl(s, MPEG_CID(FRAME_RC_ENABLE), 1, "frame level rate control", 0); in v4l2_prepare_encoder()
202 "Encoder Context: id (%d), profile (%d), frame rate(%d/%d), number b-frames (%d), " in v4l2_prepare_encoder()
274 static int v4l2_send_frame(AVCodecContext *avctx, const AVFrame *frame) in v4l2_send_frame() argument
280 if (frame && frame->pict_type == AV_PICTURE_TYPE_I) in v4l2_send_frame()
281 v4l2_set_ext_ctrl(s, MPEG_CID(FORCE_KEY_FRAME), 0, "force key frame", 1); in v4l2_send_frame()
284 return ff_v4l2_context_enqueue_frame(output, frame); in v4l2_send_frame()
292 AVFrame *frame = s->frame; in v4l2_receive_packet() local
298 if (!frame in v4l2_receive_packet()
[all...]
smc.c
51 AVFrame *frame; member
87 int stride = s->frame->linesize[0]; in smc_decode_stream()
98 uint8_t * const pixels = s->frame->data[0]; in smc_decode_stream()
100 int image_size = height * s->frame->linesize[0]; in smc_decode_stream()
118 memcpy(s->frame->data[1], s->pal, AVPALETTE_SIZE); in smc_decode_stream()
424 s->frame = av_frame_alloc(); in smc_decode_init()
425 if (!s->frame) in smc_decode_init()
445 if ((ret = ff_reget_buffer(avctx, s->frame, 0)) < 0) in smc_decode_frame()
448 s->frame->palette_has_changed = ff_copy_palette(s->pal, avpkt, avctx); in smc_decode_frame()
455 if ((ret = av_frame_ref(rframe, s->frame)) < in smc_decode_frame()
[all...]
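
The smc.c hits above show the usual decoder layout: one persistent AVFrame kept in the codec context, re-referenced for every output frame. A minimal sketch of that reference handling using only the public AVFrame API (the struct and helper names below are illustrative, not FFmpeg's own):

    #include <libavutil/frame.h>

    /* Illustrative decoder state mirroring the "AVFrame *frame; member" hit. */
    typedef struct DecState {
        AVFrame *frame;
    } DecState;

    static int dec_init(DecState *s)
    {
        s->frame = av_frame_alloc();          /* allocated once, reused per packet */
        return s->frame ? 0 : -1;
    }

    static int dec_emit(DecState *s, AVFrame *out)
    {
        return av_frame_ref(out, s->frame);   /* caller gets its own reference */
    }

    static void dec_close(DecState *s)
    {
        av_frame_free(&s->frame);             /* drops the decoder's last reference */
    }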
opusdec.c
170 av_log(s->avctx, AV_LOG_ERROR, "Error decoding the redundancy frame.\n"); in opus_decode_redundancy()
186 /* decode the silk frame */ in opus_decode_frame()
199 av_log(s->avctx, AV_LOG_ERROR, "Error decoding a SILK frame.\n"); in opus_decode_frame()
230 av_log(s->avctx, AV_LOG_ERROR, "Invalid redundancy frame size.\n"); in opus_decode_frame()
242 /* decode the CELT frame */ in opus_decode_frame()
393 av_log(s->avctx, AV_LOG_ERROR, "Error decoding an Opus frame.\n"); in opus_decode_subpacket()
415 static int opus_decode_packet(AVCodecContext *avctx, AVFrame *frame, in opus_decode_packet() argument
447 frame->nb_samples = coded_samples + delayed_samples; in opus_decode_packet()
450 if (!frame->nb_samples) { in opus_decode_packet()
456 ret = ff_get_buffer(avctx, frame, in opus_decode_packet()
[all...]
fmvc.c
397 static int decode_frame(AVCodecContext *avctx, AVFrame *frame, in decode_frame() argument
434 if ((ret = ff_get_buffer(avctx, frame, 0)) < 0) in decode_frame()
437 frame->key_frame = 1; in decode_frame()
438 frame->pict_type = AV_PICTURE_TYPE_I; in decode_frame()
441 dst = frame->data[0] + (avctx->height - 1) * frame->linesize[0]; in decode_frame()
444 dst -= frame->linesize[0]; in decode_frame()
520 if ((ret = ff_get_buffer(avctx, frame, 0)) < 0) in decode_frame()
523 frame->key_frame = 0; in decode_frame()
524 frame in decode_frame()
[all...]
mfenc.c
38 AVFrame *frame; member
276 // starts from the input pts of the first frame, while the output pts in mf_sample_to_avpacket()
288 static IMFSample *mf_a_avframe_to_sample(AVCodecContext *avctx, const AVFrame *frame) in mf_a_avframe_to_sample() argument
296 len = frame->nb_samples * bps; in mf_a_avframe_to_sample()
298 sample = ff_create_memory_sample(&c->functions, frame->data[0], len, in mf_a_avframe_to_sample()
301 IMFSample_SetSampleDuration(sample, mf_to_mf_time(avctx, frame->nb_samples)); in mf_a_avframe_to_sample()
305 static IMFSample *mf_v_avframe_to_sample(AVCodecContext *avctx, const AVFrame *frame) in mf_v_avframe_to_sample() argument
337 ret = av_image_copy_to_buffer((uint8_t *)data, size, (void *)frame->data, frame->linesize, in mf_v_avframe_to_sample()
347 IMFSample_SetSampleDuration(sample, mf_to_mf_time(avctx, frame in mf_v_avframe_to_sample()
352 mf_avframe_to_sample(AVCodecContext *avctx, const AVFrame *frame) mf_avframe_to_sample() argument
[all...]
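
The mf_v_avframe_to_sample() hit copies a frame's planes into one contiguous buffer before wrapping it in an IMFSample; the public helper it relies on is av_image_copy_to_buffer(), sketched here (the wrapper name and the align value of 1 are assumptions):

    #include <libavutil/frame.h>
    #include <libavutil/imgutils.h>

    /* Flatten an AVFrame's planes into one contiguous buffer. */
    static int frame_to_flat_buffer(const AVFrame *frame, uint8_t *buf, int buf_size)
    {
        return av_image_copy_to_buffer(buf, buf_size,
                                       (const uint8_t * const *)frame->data,
                                       frame->linesize,
                                       (enum AVPixelFormat)frame->format,
                                       frame->width, frame->height, 1);
    }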
libcodec2.c
133 static int libcodec2_decode(AVCodecContext *avctx, AVFrame *frame, in libcodec2_decode() argument
142 frame->nb_samples = avctx->frame_size * nframes; in libcodec2_decode()
144 ret = ff_get_buffer(avctx, frame, 0); in libcodec2_decode()
150 output = (int16_t *)frame->data[0]; in libcodec2_decode()
163 const AVFrame *frame, int *got_packet_ptr) in libcodec2_encode()
166 int16_t *samples = (int16_t *)frame->data[0]; in libcodec2_encode()
162 libcodec2_encode(AVCodecContext *avctx, AVPacket *avpkt, const AVFrame *frame, int *got_packet_ptr) libcodec2_encode() argument
libshine.c
71 const AVFrame *frame, int *got_packet_ptr) in libshine_encode_frame()
79 if (frame) in libshine_encode_frame()
80 data = shine_encode_buffer(s->shine, (int16_t **)frame->data, &written); in libshine_encode_frame()
93 if (frame) { in libshine_encode_frame()
94 if ((ret = ff_af_queue_add(&s->afq, frame)) < 0) in libshine_encode_frame()
70 libshine_encode_frame(AVCodecContext *avctx, AVPacket *avpkt, const AVFrame *frame, int *got_packet_ptr) libshine_encode_frame() argument
avs.c
28 AVFrame *frame; member
53 AVFrame *const p = avs->frame; in avs_decode_frame()
161 s->frame = av_frame_alloc(); in avs_decode_init()
162 if (!s->frame) in avs_decode_init()
173 av_frame_free(&s->frame); in avs_decode_end()
/third_party/ffmpeg/libavfilter/
vf_thumbnail.c
24 * selection (such as a black frame) we could get with an absolute seek. in filter_frame()
38 AVFrame *buf; ///< cached frame
39 int histogram[HIST_SIZE]; ///< RGB color distribution histogram of the frame
44 int n; ///< current frame
110 // find the frame closer to the average using the sum of squared errors in get_best_frame()
117 // free and reset everything (except the best frame buffer) in get_best_frame()
127 av_log(ctx, AV_LOG_INFO, "frame id #%d (pts_time=%f) selected " in get_best_frame()
135 static int filter_frame(AVFilterLink *inlink, AVFrame *frame) in filter_frame() argument
142 const uint8_t *p = frame->data[0]; in filter_frame()
144 // keep a reference of each frame in filter_frame()
[all...]
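
The vf_thumbnail hits describe picking the frame whose RGB histogram is closest to the running average by sum of squared errors; a hedged sketch of that distance computation (the HIST_SIZE layout below is an assumption about the filter's histogram format):

    #define HIST_SIZE (3 * 256)   /* assumption: one 256-bin histogram per R, G, B channel */

    /* Squared-error distance between one frame's histogram and the average. */
    static double hist_sq_err(const int *hist, const double *average)
    {
        double sum = 0.0;
        for (int i = 0; i < HIST_SIZE; i++) {
            double err = average[i] - (double)hist[i];
            sum += err * err;
        }
        return sum;    /* the frame with the smallest sum is the thumbnail */
    }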
buffersrc.h
44 * Immediately push the frame to the output.
49 * Keep a reference to the frame.
50 * If the frame if reference-counted, create a new reference; otherwise
51 * copy the frame data.
61 * frame is present in the buffer.
62 * The number is reset when a frame is added.
95 * Video only, the frame rate of the input video. This field must only be
150 * Add a frame to the buffer source.
153 * @param frame frame t
[all...]
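
The buffersrc.h hits come from the documentation of the frame-injection flags; a minimal usage sketch with the public API, keeping the caller's own reference as AV_BUFFERSRC_FLAG_KEEP_REF allows (the wrapper name is illustrative):

    #include <libavfilter/buffersrc.h>

    /* Feed one decoded frame to the graph's buffer source while keeping our
     * own reference to it, so the frame can still be reused afterwards. */
    static int feed_buffersrc(AVFilterContext *buffersrc_ctx, AVFrame *frame)
    {
        return av_buffersrc_add_frame_flags(buffersrc_ctx, frame,
                                            AV_BUFFERSRC_FLAG_KEEP_REF);
    }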
vf_readvitc.c
209 static int filter_frame(AVFilterLink *inlink, AVFrame *frame) in filter_frame() argument
216 found = read_vitc_line(s, frame->data[0], frame->linesize[0], inlink->w, inlink->h); in filter_frame()
217 av_dict_set(&frame->metadata, "lavfi.readvitc.found", (found ? "1" : "0"), 0); in filter_frame()
219 av_dict_set(&frame->metadata, "lavfi.readvitc.tc_str", make_vitc_tc_string(s->tcbuf, s->line_data), 0); in filter_frame()
221 return ff_filter_frame(outlink, frame); in filter_frame()
242 .description = NULL_IF_CONFIG_SMALL("Read vertical interval timecode and write it to frame metadata."),
/third_party/ffmpeg/libavutil/
hwcontext_drm.c
80 static int drm_get_buffer(AVHWFramesContext *hwfc, AVFrame *frame) in drm_get_buffer() argument
82 frame->buf[0] = av_buffer_pool_get(hwfc->pool); in drm_get_buffer()
83 if (!frame->buf[0]) in drm_get_buffer()
86 frame->data[0] = (uint8_t*)frame->buf[0]->data; in drm_get_buffer()
88 frame->format = AV_PIX_FMT_DRM_PRIME; in drm_get_buffer()
89 frame->width = hwfc->width; in drm_get_buffer()
90 frame->height = hwfc->height; in drm_get_buffer()
hwcontext_d3d11va.c
311 static int d3d11va_get_buffer(AVHWFramesContext *ctx, AVFrame *frame) in d3d11va_get_buffer() argument
315 frame->buf[0] = av_buffer_pool_get(ctx->pool); in d3d11va_get_buffer()
316 if (!frame->buf[0]) in d3d11va_get_buffer()
319 desc = (AVD3D11FrameDescriptor *)frame->buf[0]->data; in d3d11va_get_buffer()
321 frame->data[0] = (uint8_t *)desc->texture; in d3d11va_get_buffer()
322 frame->data[1] = (uint8_t *)desc->index; in d3d11va_get_buffer()
323 frame->format = AV_PIX_FMT_D3D11; in d3d11va_get_buffer()
324 frame->width = ctx->width; in d3d11va_get_buffer()
325 frame->height = ctx->height; in d3d11va_get_buffer()
398 const AVFrame *frame in d3d11va_transfer_data() local
[all...]
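
Both hwcontext hits implement the per-backend get_buffer hook that fills frame->buf[0], format, width and height; from the caller's side the equivalent public entry point is av_hwframe_get_buffer(), sketched below under the assumption that frames_ref is an AVHWFramesContext already set up with av_hwframe_ctx_alloc()/av_hwframe_ctx_init():

    #include <libavutil/error.h>
    #include <libavutil/frame.h>
    #include <libavutil/hwcontext.h>

    /* Allocate one hardware surface from an initialized frames context. */
    static int get_hw_surface(AVBufferRef *frames_ref, AVFrame **out)
    {
        AVFrame *frame = av_frame_alloc();
        int ret;

        if (!frame)
            return AVERROR(ENOMEM);
        ret = av_hwframe_get_buffer(frames_ref, frame, 0);
        if (ret < 0) {
            av_frame_free(&frame);
            return ret;
        }
        *out = frame;   /* format/width/height were filled in by the backend */
        return 0;
    }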
/third_party/skia/src/android/
SkAnimatedImage.cpp
171 // Final frame. Check to determine whether to stop. in computeNextFrame()
217 SkCodecPrintf("Error getting frameInfo for frame %i\n", in decodeNextFrame()
230 for (Frame* frame : { &fRestoreFrame, &fDecodingFrame }) { in decodeNextFrame()
231 if (frameToDecode == frame->fIndex) { in decodeNextFrame()
233 swap(fDisplayFrame, *frame); in decodeNextFrame()
241 // The following code makes an effort to avoid overwriting a frame that will in decodeNextFrame()
242 // be used again. If frame |i| is_restore_previous, frame |i+1| will not in decodeNextFrame()
243 // depend on frame |i|, so do not overwrite frame | in decodeNextFrame()
[all...]
/third_party/node/deps/v8/src/deoptimizer/
translated-state.h
41 // frame, contains:
43 // TranslatedFrame: describes a single unoptimized frame, contains:
198 // `height` is slightly misleading. Yes, this value is related to stack frame
200 // frame height (e.g.: addition/subtraction of context, accumulator, fixed
201 // frame sizes, padding).
338 // and resolving them to values using the supplied frame pointer and
344 // 3. Reading out the frame values.
354 // frame (replacing it with one or several unoptimized frames). It is used by
359 // frame. It is used by stack trace generation and various debugging features.
360 explicit TranslatedState(const JavaScriptFrame* frame);
[all...]
/third_party/node/deps/v8/src/runtime/
runtime-compiler.cc
201 JavaScriptFrame* top_frame = top_it.frame(); in RUNTIME_FUNCTION()
234 // Determine the frame that triggered the OSR request. in RUNTIME_FUNCTION()
236 UnoptimizedFrame* frame = UnoptimizedFrame::cast(it.frame()); in RUNTIME_FUNCTION() local
238 DCHECK_IMPLIES(frame->is_interpreted(), in RUNTIME_FUNCTION()
239 frame->LookupCode().is_interpreter_trampoline_builtin()); in RUNTIME_FUNCTION()
240 DCHECK_IMPLIES(frame->is_baseline(), in RUNTIME_FUNCTION()
241 frame->LookupCode().kind() == CodeKind::BASELINE); in RUNTIME_FUNCTION()
242 DCHECK(frame->function().shared().HasBytecodeArray()); in RUNTIME_FUNCTION()
245 BytecodeOffset osr_offset = BytecodeOffset(frame in RUNTIME_FUNCTION()
[all...]
/third_party/skia/third_party/externals/libgifcodec/
SkLibGifCodec.cpp
78 auto* frame = reader->frameContext(0); in MakeFromStream() local
79 if (!frame || !frame->isHeaderDefined()) { in MakeFromStream()
137 // If a frame is offscreen, it will have no effect on the output in onGetFrameInfo()
165 // This is possible for an empty frame. Create a dummy with one value (transparent). in initializeColorTable()
189 // - If there is a transparent pixel, and this frame draws on top of another frame in prepareToDecode()
190 // (if the frame is independent with a transparent pixel, we should not decode to in prepareToDecode()
199 return gif_error("Cannot decode multiframe gif (except frame 0) as 565.\n", in prepareToDecode()
203 const auto* frame in prepareToDecode() local
236 const SkGIFFrameContext* frame = fReader->frameContext(frameIndex); initializeSwizzler() local
[all...]
/third_party/wpa_supplicant/wpa_supplicant-2.9_standard/src/drivers/
driver_nl80211_event.c
32 const u8 *frame, size_t len,
206 const u8 *frame, size_t len) in mlme_event_auth()
223 mgmt = (const struct ieee80211_mgmt *) frame; in mlme_event_auth()
226 "frame"); in mlme_event_auth()
268 const u8 *frame, size_t len, struct nlattr *wmm, in mlme_event_assoc()
288 mgmt = (const struct ieee80211_mgmt *) frame; in mlme_event_assoc()
291 "frame"); in mlme_event_assoc()
316 event.assoc_info.resp_frame = frame; in mlme_event_assoc()
548 "nl80211: MLO: (Re)Association Request/Response frame elements not available"); in nl80211_update_rejected_links_info()
1101 const u8 *frame, size_
205 mlme_event_auth(struct wpa_driver_nl80211_data *drv, const u8 *frame, size_t len) mlme_event_auth() argument
267 mlme_event_assoc(struct wpa_driver_nl80211_data *drv, const u8 *frame, size_t len, struct nlattr *wmm, struct nlattr *req_ie) mlme_event_assoc() argument
1098 mlme_event_mgmt(struct i802_bss *bss, struct nlattr *freq, struct nlattr *sig, const u8 *frame, size_t len, int link_id) global() argument
1149 mlme_event_mgmt_tx_status(struct wpa_driver_nl80211_data *drv, struct nlattr *cookie, const u8 *frame, size_t len, struct nlattr *ack) global() argument
1211 mlme_event_deauth_disassoc(struct wpa_driver_nl80211_data *drv, enum wpa_event_type type, const u8 *frame, size_t len) global() argument
1338 mlme_event_unprot_disconnect(struct wpa_driver_nl80211_data *drv, enum wpa_event_type type, const u8 *frame, size_t len) global() argument
1375 mlme_event_unprot_beacon(struct wpa_driver_nl80211_data *drv, const u8 *frame, size_t len) global() argument
1406 mlme_event(struct i802_bss *bss, enum nl80211_commands cmd, struct nlattr *frame, struct nlattr *addr, struct nlattr *timed_out, struct nlattr *freq, struct nlattr *ack, struct nlattr *cookie, struct nlattr *sig, struct nlattr *wmm, struct nlattr *req_ie, struct nlattr *link) global() argument
3234 nl80211_control_port_frame_tx_status(struct wpa_driver_nl80211_data *drv, const u8 *frame, size_t len, struct nlattr *ack, struct nlattr *cookie) global() argument
3279 struct nlattr *frame = tb[NL80211_ATTR_FRAME]; global() local
[all...]
/third_party/nghttp2/examples/
libevent-server.c
295 /* Serialize the frame and send (or buffer) the data to
482 const nghttp2_frame *frame, const uint8_t *name, in on_header_callback()
490 switch (frame->hd.type) { in on_header_callback()
492 if (frame->headers.cat != NGHTTP2_HCAT_REQUEST) { in on_header_callback()
496 nghttp2_session_get_stream_user_data(session, frame->hd.stream_id); in on_header_callback()
512 const nghttp2_frame *frame, in on_begin_headers_callback()
517 if (frame->hd.type != NGHTTP2_HEADERS || in on_begin_headers_callback()
518 frame->headers.cat != NGHTTP2_HCAT_REQUEST) { in on_begin_headers_callback()
521 stream_data = create_http2_stream_data(session_data, frame->hd.stream_id); in on_begin_headers_callback()
522 nghttp2_session_set_stream_user_data(session, frame in on_begin_headers_callback()
481 on_header_callback(nghttp2_session *session, const nghttp2_frame *frame, const uint8_t *name, size_t namelen, const uint8_t *value, size_t valuelen, uint8_t flags, void *user_data) on_header_callback() argument
511 on_begin_headers_callback(nghttp2_session *session, const nghttp2_frame *frame, void *user_data) on_begin_headers_callback() argument
576 on_frame_recv_callback(nghttp2_session *session, const nghttp2_frame *frame, void *user_data) on_frame_recv_callback() argument
[all...]
libevent-client.c
216 const nghttp2_frame *frame, const uint8_t *name, in on_header_callback()
223 switch (frame->hd.type) { in on_header_callback()
225 if (frame->headers.cat == NGHTTP2_HCAT_RESPONSE && in on_header_callback()
226 session_data->stream_data->stream_id == frame->hd.stream_id) { in on_header_callback()
238 const nghttp2_frame *frame, in on_begin_headers_callback()
243 switch (frame->hd.type) { in on_begin_headers_callback()
245 if (frame->headers.cat == NGHTTP2_HCAT_RESPONSE && in on_begin_headers_callback()
246 session_data->stream_data->stream_id == frame->hd.stream_id) { in on_begin_headers_callback()
248 frame->hd.stream_id); in on_begin_headers_callback()
256 received a complete frame fro
215 on_header_callback(nghttp2_session *session, const nghttp2_frame *frame, const uint8_t *name, size_t namelen, const uint8_t *value, size_t valuelen, uint8_t flags, void *user_data) on_header_callback() argument
237 on_begin_headers_callback(nghttp2_session *session, const nghttp2_frame *frame, void *user_data) on_begin_headers_callback() argument
257 on_frame_recv_callback(nghttp2_session *session, const nghttp2_frame *frame, void *user_data) on_frame_recv_callback() argument
[all...]
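
The nghttp2 example hits all follow the same callback shape: receive a const nghttp2_frame *, switch on frame->hd.type, and look up per-stream state via the stream id. A minimal hedged sketch of registering such a hook (the logging body is illustrative):

    #include <stdio.h>
    #include <nghttp2/nghttp2.h>

    /* Called once per complete frame received from the peer. */
    static int on_frame_recv(nghttp2_session *session,
                             const nghttp2_frame *frame, void *user_data)
    {
        (void)session;
        (void)user_data;
        fprintf(stderr, "frame type=%d stream=%d\n",
                (int)frame->hd.type, (int)frame->hd.stream_id);
        return 0;
    }

    static void register_frame_callback(nghttp2_session_callbacks *callbacks)
    {
        nghttp2_session_callbacks_set_on_frame_recv_callback(callbacks,
                                                             on_frame_recv);
    }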
/third_party/ffmpeg/fftools/
ffmpeg.c
179 AVFrame *frame = ist->sub2video.frame; in sub2video_get_blank_frame() local
181 av_frame_unref(frame); in sub2video_get_blank_frame()
182 ist->sub2video.frame->width = ist->dec_ctx->width ? ist->dec_ctx->width : ist->sub2video.w; in sub2video_get_blank_frame()
183 ist->sub2video.frame->height = ist->dec_ctx->height ? ist->dec_ctx->height : ist->sub2video.h; in sub2video_get_blank_frame()
184 ist->sub2video.frame->format = AV_PIX_FMT_RGB32; in sub2video_get_blank_frame()
185 if ((ret = av_frame_get_buffer(frame, 0)) < 0) in sub2video_get_blank_frame()
187 memset(frame->data[0], 0, frame->height * frame in sub2video_get_blank_frame()
224 AVFrame *frame = ist->sub2video.frame; sub2video_push_ref() local
242 AVFrame *frame = ist->sub2video.frame; sub2video_update() local
531 AVFrame *frame; ffmpeg_cleanup() local
764 adjust_frame_pts_to_encoder_tb(OutputFile *of, OutputStream *ost, AVFrame *frame) adjust_frame_pts_to_encoder_tb() argument
811 init_output_stream_wrapper(OutputStream *ost, AVFrame *frame, unsigned int fatal) init_output_stream_wrapper() argument
892 encode_frame(OutputFile *of, OutputStream *ost, AVFrame *frame) encode_frame() argument
974 do_audio_out(OutputFile *of, OutputStream *ost, AVFrame *frame) do_audio_out() argument
1937 ifilter_send_frame(InputFilter *ifilter, AVFrame *frame, int keep_reference) ifilter_send_frame() argument
2047 decode(AVCodecContext *avctx, AVFrame *frame, int *got_frame, AVPacket *pkt) decode() argument
2954 init_output_stream_encode(OutputStream *ost, AVFrame *frame) init_output_stream_encode() argument
3117 init_output_stream(OutputStream *ost, AVFrame *frame, char *error, int error_len) init_output_stream() argument
[all...]
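
The fftools/ffmpeg.c hits include the decode() wrapper around the send/receive codec API; a hedged reconstruction of that pattern using only the public API (this is the general shape, not the exact fftools code):

    #include <libavcodec/avcodec.h>

    /* One send/receive decode step: feed a packet if given, pull one frame if ready. */
    static int decode_step(AVCodecContext *avctx, AVFrame *frame,
                           int *got_frame, const AVPacket *pkt)
    {
        int ret;

        *got_frame = 0;
        if (pkt) {
            ret = avcodec_send_packet(avctx, pkt);
            if (ret < 0 && ret != AVERROR_EOF)
                return ret;              /* real errors only; EOF means we are flushing */
        }
        ret = avcodec_receive_frame(avctx, frame);
        if (ret < 0 && ret != AVERROR(EAGAIN) && ret != AVERROR_EOF)
            return ret;
        if (ret >= 0)
            *got_frame = 1;              /* a decoded frame is ready in |frame| */
        return 0;
    }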
/third_party/mesa3d/src/gallium/drivers/nouveau/
nouveau_video.c
233 bool luma, bool frame, bool forward, bool vert, in nouveau_vpe_mb_mv()
245 assert(frame); // Untested for non-frames in nouveau_vpe_mb_mv()
246 if (!frame) in nouveau_vpe_mb_mv()
275 bool frame = dec->picture_structure == PIPE_MPEG12_PICTURE_STRUCTURE_FRAME; in nouveau_vpe_mb_mv_header() local
280 y = mb->y * (frame ? 16 : 32); in nouveau_vpe_mb_mv_header()
282 y = mb->y * (frame ? 8 : 16); in nouveau_vpe_mb_mv_header()
283 if (frame) in nouveau_vpe_mb_mv_header()
292 if (frame) { in nouveau_vpe_mb_mv_header()
299 nouveau_vpe_mb_mv(dec, base, luma, frame, true, false, in nouveau_vpe_mb_mv_header()
301 nouveau_vpe_mb_mv(dec, base, luma, frame, tru in nouveau_vpe_mb_mv_header()
232 nouveau_vpe_mb_mv(struct nouveau_decoder *dec, unsigned mc_header, bool luma, bool frame, bool forward, bool vert, int x, int y, const short motions[2], unsigned surface, bool first) nouveau_vpe_mb_mv() argument
[all...]
/third_party/python/Lib/idlelib/
scrolledlist.py
12 # Create top frame, with scrollbar and listbox
14 self.frame = frame = Frame(master)
15 self.frame.pack(fill="both", expand=1)
16 self.vbar = vbar = Scrollbar(frame, name="vbar")
18 self.listbox = listbox = Listbox(frame, exportselection=0,
40 self.frame.destroy()
