/third_party/musl/porting/linux/user/src/gwp_asan/ |
H A D | gwp_asan.c |
  287  * We can build library with "-fno-omit-frame-pointer" to get a more accurate call stack.
  313  unwind_info *frame = (unwind_info*)(current_frame_addr);  in libc_gwp_asan_unwind_fast() local
  315  num_frames, current_frame_addr, stack_end, frame->fp, frame->lr);  in libc_gwp_asan_unwind_fast()
  316  if (!frame->lr) {  in libc_gwp_asan_unwind_fast()
  320  frame_buf[num_frames] = strip_pac_pc(frame->lr) - 4;  in libc_gwp_asan_unwind_fast()
  323  if (frame->fp == prev_fp || frame->lr == prev_lr || frame->fp < current_frame_addr + sizeof(unwind_info) ||  in libc_gwp_asan_unwind_fast()
  324  frame  in libc_gwp_asan_unwind_fast()
  [all...] |
/third_party/musl/src/gwp_asan/linux/ |
H A D | gwp_asan.c |
  287  * We can build library with "-fno-omit-frame-pointer" to get a more accurate call stack.
  313  unwind_info *frame = (unwind_info*)(current_frame_addr);  in libc_gwp_asan_unwind_fast() local
  315  num_frames, current_frame_addr, stack_end, frame->fp, frame->lr);  in libc_gwp_asan_unwind_fast()
  316  if (!frame->lr) {  in libc_gwp_asan_unwind_fast()
  320  frame_buf[num_frames] = strip_pac_pc(frame->lr) - 4;  in libc_gwp_asan_unwind_fast()
  323  if (frame->fp == prev_fp || frame->lr == prev_lr || frame->fp < current_frame_addr + sizeof(unwind_info) ||  in libc_gwp_asan_unwind_fast()
  324  frame  in libc_gwp_asan_unwind_fast()
  [all...] |
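Both copies of gwp_asan.c walk the frame-pointer chain: read the {fp, lr} record at the current frame address, record lr minus 4 (to point back into the call), and stop when lr is zero, the chain stops advancing, or the walk leaves the stack. A minimal sketch of that loop, assuming an AArch64-style frame record; the struct layout is an assumption, and the strip_pac_pc() pointer-authentication stripping and logging from the real code are omitted:

    #include <stddef.h>
    #include <stdint.h>

    /* Assumed layout of the record the snippets call unwind_info:
     * an AArch64 frame record stores the caller's fp and the saved lr. */
    typedef struct unwind_info {
        struct unwind_info *fp;   /* previous frame record */
        uintptr_t lr;             /* saved return address */
    } unwind_info;

    /* Walk the fp chain starting at current_frame_addr and fill frame_buf
     * with return addresses; stop on a null lr, a chain that does not
     * advance, or a frame outside [current_frame_addr, stack_end). */
    static size_t fp_unwind(uintptr_t *frame_buf, size_t max_frames,
                            void *current_frame_addr, void *stack_end)
    {
        size_t num_frames = 0;
        unwind_info *frame = (unwind_info *)current_frame_addr;
        unwind_info *prev_fp = NULL;
        uintptr_t prev_lr = 0;

        while (num_frames < max_frames && (char *)frame < (char *)stack_end) {
            if (!frame->lr)
                break;
            /* -4 moves from the return address back into the call site */
            frame_buf[num_frames++] = frame->lr - 4;

            if (frame->fp == prev_fp || frame->lr == prev_lr ||
                (char *)frame->fp < (char *)frame + sizeof(unwind_info))
                break;                /* corrupt or non-advancing chain */
            prev_fp = frame->fp;
            prev_lr = frame->lr;
            frame = frame->fp;
        }
        return num_frames;
    }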
/third_party/nghttp2/lib/ |
H A D | nghttp2_http.c |
  351  nghttp2_frame *frame, nghttp2_hd_nv *nv,  in nghttp2_http_on_header()
  390  if (session->server || frame->hd.type == NGHTTP2_PUSH_PROMISE) {  in nghttp2_http_on_header()
  435  if (session->server || frame->hd.type == NGHTTP2_PUSH_PROMISE) {  in nghttp2_http_on_header()
  445  nghttp2_frame *frame) {  in nghttp2_http_on_request_headers()
  471  if (frame->hd.type == NGHTTP2_PUSH_PROMISE) {  in nghttp2_http_on_request_headers()
  509  nghttp2_frame *frame) {  in nghttp2_http_on_trailer_headers()
  512  if ((frame->hd.flags & NGHTTP2_FLAG_END_STREAM) == 0) {  in nghttp2_http_on_trailer_headers()
  545  nghttp2_frame *frame) {  in nghttp2_http_record_request_method()
  550  switch (frame->hd.type) {  in nghttp2_http_record_request_method()
  552  nva = frame  in nghttp2_http_record_request_method()
  350  nghttp2_http_on_header(nghttp2_session *session, nghttp2_stream *stream, nghttp2_frame *frame, nghttp2_hd_nv *nv, int trailer)  nghttp2_http_on_header() argument
  444  nghttp2_http_on_request_headers(nghttp2_stream *stream, nghttp2_frame *frame)  nghttp2_http_on_request_headers() argument
  508  nghttp2_http_on_trailer_headers(nghttp2_stream *stream, nghttp2_frame *frame)  nghttp2_http_on_trailer_headers() argument
  544  nghttp2_http_record_request_method(nghttp2_stream *stream, nghttp2_frame *frame)  nghttp2_http_record_request_method() argument
  [all...] |
/third_party/node/deps/nghttp2/lib/ |
H A D | nghttp2_http.c |
  351  nghttp2_frame *frame, nghttp2_hd_nv *nv,  in nghttp2_http_on_header()
  390  if (session->server || frame->hd.type == NGHTTP2_PUSH_PROMISE) {  in nghttp2_http_on_header()
  435  if (session->server || frame->hd.type == NGHTTP2_PUSH_PROMISE) {  in nghttp2_http_on_header()
  445  nghttp2_frame *frame) {  in nghttp2_http_on_request_headers()
  471  if (frame->hd.type == NGHTTP2_PUSH_PROMISE) {  in nghttp2_http_on_request_headers()
  509  nghttp2_frame *frame) {  in nghttp2_http_on_trailer_headers()
  512  if ((frame->hd.flags & NGHTTP2_FLAG_END_STREAM) == 0) {  in nghttp2_http_on_trailer_headers()
  545  nghttp2_frame *frame) {  in nghttp2_http_record_request_method()
  550  switch (frame->hd.type) {  in nghttp2_http_record_request_method()
  552  nva = frame  in nghttp2_http_record_request_method()
  350  nghttp2_http_on_header(nghttp2_session *session, nghttp2_stream *stream, nghttp2_frame *frame, nghttp2_hd_nv *nv, int trailer)  nghttp2_http_on_header() argument
  444  nghttp2_http_on_request_headers(nghttp2_stream *stream, nghttp2_frame *frame)  nghttp2_http_on_request_headers() argument
  508  nghttp2_http_on_trailer_headers(nghttp2_stream *stream, nghttp2_frame *frame)  nghttp2_http_on_trailer_headers() argument
  544  nghttp2_http_record_request_method(nghttp2_stream *stream, nghttp2_frame *frame)  nghttp2_http_record_request_method() argument
  [all...] |
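Both copies of nghttp2_http.c branch on frame->hd.type and frame->hd.flags inside the library itself; application code sees the same nghttp2_frame through the public callback API. A minimal sketch of registering such a callback (the callback body and names here are illustrative, not taken from the source):

    #include <stdio.h>
    #include <nghttp2/nghttp2.h>

    /* Log HEADERS frames that carry END_STREAM, mirroring the kind of
     * type/flag checks visible in the hits above. */
    static int on_frame_recv(nghttp2_session *session,
                             const nghttp2_frame *frame, void *user_data)
    {
        (void)session;
        (void)user_data;
        if (frame->hd.type == NGHTTP2_HEADERS &&
            (frame->hd.flags & NGHTTP2_FLAG_END_STREAM)) {
            printf("stream %d: HEADERS with END_STREAM\n",
                   (int)frame->hd.stream_id);
        }
        return 0;
    }

    static void register_frame_callback(nghttp2_session_callbacks *callbacks)
    {
        nghttp2_session_callbacks_set_on_frame_recv_callback(callbacks,
                                                             on_frame_recv);
    }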
/third_party/mesa3d/src/intel/common/ |
H A D | intel_measure.c |
  51   { "frame", INTEL_MEASURE_FRAME },
  122  fprintf(stderr, "INTEL_MEASURE start frame may "  in intel_measure_init()
  135  fprintf(stderr, "INTEL_MEASURE count frame must be positive: %d\n",  in intel_measure_init()
  210  fputs("draw_start,draw_end,frame,batch,"  in intel_measure_init()
  217  device->frame = 0;  in intel_measure_init()
  264  * - at frame boundaries
  319  * Notify intel_measure that a frame is about to begin.
  321  * Configuration values and the control fifo may commence measurement at frame
  325  intel_measure_frame_transition(unsigned frame)  in intel_measure_frame_transition() argument
  327  if (frame  in intel_measure_frame_transition()
  [all...] |
/third_party/python/Lib/ |
H A D | cgitb.py |
  11   context - number of lines of source code to show for each stack frame
  69   def lookup(name, frame, locals):
  73   if name in frame.f_globals:
  74   return 'global', frame.f_globals[name]
  75   if '__builtins__' in frame.f_globals:
  76   builtins = frame.f_globals['__builtins__']
  85   def scanvars(reader, frame, locals):
  96   where, value = lookup(token, frame, locals)
  129  for frame, file, lnum, func, lines, index in records:
  135  args, varargs, varkw, locals = inspect.getargvalues(frame)
  [all...] |
/third_party/ffmpeg/libavcodec/ |
H A D | pcm.c |
  89   samples_ ## type = (const type *) frame->extended_data[c]; \
  97   const AVFrame *frame, int *got_packet_ptr)  in pcm_encode_frame()
  110  n = frame->nb_samples * avctx->ch_layout.nb_channels;  in pcm_encode_frame()
  111  samples = (const short *)frame->data[0];  in pcm_encode_frame()
  215  const uint8_t *src = frame->extended_data[c];  in pcm_encode_frame()
  321  dst = frame->extended_data[c]; \
  329  static int pcm_decode_frame(AVCodecContext *avctx, AVFrame *frame,  in pcm_decode_frame() argument
  380  frame->nb_samples = n * samples_per_block / channels;  in pcm_decode_frame()
  381  if ((ret = ff_get_buffer(avctx, frame, 0)) < 0)  in pcm_decode_frame()
  383  samples = frame  in pcm_decode_frame()
  96   pcm_encode_frame(AVCodecContext *avctx, AVPacket *avpkt, const AVFrame *frame, int *got_packet_ptr)  pcm_encode_frame() argument
  [all...] |
H A D | xan.c |
  209  static inline void xan_wc3_output_pixel_run(XanContext *s, AVFrame *frame,  in xan_wc3_output_pixel_run() argument
  219  palette_plane = frame->data[0];  in xan_wc3_output_pixel_run()
  220  stride = frame->linesize[0];  in xan_wc3_output_pixel_run()
  239  static inline void xan_wc3_copy_pixel_run(XanContext *s, AVFrame *frame,  in xan_wc3_copy_pixel_run() argument
  255  palette_plane = frame->data[0];  in xan_wc3_copy_pixel_run()
  259  stride = frame->linesize[0];  in xan_wc3_copy_pixel_run()
  297  static int xan_wc3_decode_frame(XanContext *s, AVFrame *frame)  in xan_wc3_decode_frame() argument
  355  /* use the decoded data segments to build the frame */  in xan_wc3_decode_frame()
  423  /* run of (size) pixels is unchanged from last frame */  in xan_wc3_decode_frame()
  424  xan_wc3_copy_pixel_run(s, frame,  in xan_wc3_decode_frame()
  541  xan_decode_frame(AVCodecContext *avctx, AVFrame *frame, int *got_frame, AVPacket *avpkt)  xan_decode_frame() argument
  [all...] |
H A D | libilbc.c |
  76   av_log(avctx, AV_LOG_ERROR, "iLBC frame mode not indicated\n");  in ilbc_decode_init()
  90   static int ilbc_decode_frame(AVCodecContext *avctx, AVFrame *frame,  in ilbc_decode_frame() argument
  100  av_log(avctx, AV_LOG_ERROR, "iLBC frame too short (%u, should be %u)\n",  in ilbc_decode_frame()
  102  av_log(avctx, AV_LOG_ERROR, "iLBC frame too short (%u, should be "  in ilbc_decode_frame()
  109  frame->nb_samples = s->decoder.blockl;  in ilbc_decode_frame()
  110  if ((ret = ff_get_buffer(avctx, frame, 0)) < 0)  in ilbc_decode_frame()
  113  WebRtcIlbcfix_DecodeImpl((int16_t *) frame->data[0], (const uint16_t *) buf, &s->decoder, 1);  in ilbc_decode_frame()
  182  const AVFrame *frame, int *got_packet_ptr)  in ilbc_encode_frame()
  190  WebRtcIlbcfix_EncodeImpl((uint16_t *) avpkt->data, (const int16_t *) frame->data[0], &s->encoder);  in ilbc_encode_frame()
  181  ilbc_encode_frame(AVCodecContext *avctx, AVPacket *avpkt, const AVFrame *frame, int *got_packet_ptr)  ilbc_encode_frame() argument
|
H A D | frame_thread_encoder.c |
  84   AVFrame *frame;  in worker() local
  104  frame = task->indata;  in worker()
  107  ret = ffcodec(avctx->codec)->cb.encode(avctx, pkt, frame, &got_packet);  in worker()
  112  pkt->pts = pkt->dts = frame->pts;  in worker()
  118  av_frame_unref(frame);  in worker()
  157  "MJPEG CBR encoding works badly with frame multi-threading, consider "  in ff_frame_thread_encoder_init()
  172  // huffyuv does not support these with multiple frame threads currently  in ff_frame_thread_encoder_init()
  284  AVFrame *frame, int *got_packet_ptr)  in ff_thread_video_encode_frame()
  291  if(frame){  in ff_thread_video_encode_frame()
  292  av_frame_move_ref(c->tasks[c->task_index].indata, frame);  in ff_thread_video_encode_frame()
  283  ff_thread_video_encode_frame(AVCodecContext *avctx, AVPacket *pkt, AVFrame *frame, int *got_packet_ptr)  ff_thread_video_encode_frame() argument
  [all...] |
H A D | lscrdec.c |
  24   #include "libavutil/frame.h"
  115  AVFrame *frame = s->last_picture;  in decode_frame_lscr() local
  129  ret = ff_reget_buffer(avctx, frame,  in decode_frame_lscr()
  157  frame->key_frame = (nb_blocks == 1) &&  in decode_frame_lscr()
  185  s->image_buf = frame->data[0] + (avctx->height - y - 1) * frame->linesize[0] + x * 3;  in decode_frame_lscr()
  186  s->image_linesize =-frame->linesize[0];  in decode_frame_lscr()
  202  frame->pict_type = frame->key_frame ? AV_PICTURE_TYPE_I : AV_PICTURE_TYPE_P;  in decode_frame_lscr()
  204  if ((ret = av_frame_ref(rframe, frame)) <  in decode_frame_lscr()
  [all...] |
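The pcm.c, libilbc.c and lscrdec.c hits all follow the same decoder pattern: set frame->nb_samples (or reuse the last picture) and then request a buffer via ff_get_buffer()/ff_reget_buffer(), which are libavcodec-internal helpers. Outside a codec, the equivalent uses the public AVFrame API; a minimal sketch for an interleaved 16-bit audio frame (parameter values are illustrative):

    #include <libavutil/frame.h>
    #include <libavutil/channel_layout.h>
    #include <libavutil/samplefmt.h>

    /* Allocate an AVFrame plus a data buffer big enough for
     * nb_samples * nb_channels interleaved s16 samples. */
    static AVFrame *alloc_s16_frame(int nb_samples, int nb_channels,
                                    int sample_rate)
    {
        AVFrame *frame = av_frame_alloc();
        if (!frame)
            return NULL;
        frame->nb_samples  = nb_samples;
        frame->format      = AV_SAMPLE_FMT_S16;
        frame->sample_rate = sample_rate;
        av_channel_layout_default(&frame->ch_layout, nb_channels);
        if (av_frame_get_buffer(frame, 0) < 0)   /* 0 = default alignment */
            av_frame_free(&frame);
        return frame;
    }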
/third_party/node/deps/v8/src/runtime/ |
H A D | runtime-test-wasm.cc |
  150  // Find the caller wasm frame.  in RUNTIME_FUNCTION()
  155  WasmFrame* frame = WasmFrame::cast(it.frame());  in RUNTIME_FUNCTION() local
  158  int func_index = frame->function_index();  in RUNTIME_FUNCTION()
  159  const wasm::WasmModule* module = frame->wasm_instance().module();  in RUNTIME_FUNCTION()
  161  wasm::ModuleWireBytes(frame->native_module()->wire_bytes());  in RUNTIME_FUNCTION()
  166  wasm::WasmCode* code = frame->wasm_code();  in RUNTIME_FUNCTION()
  187  // Find the caller wasm frame.  in RUNTIME_FUNCTION()
  192  WasmFrame* frame = WasmFrame::cast(it.frame());  in RUNTIME_FUNCTION() local
  416  WasmFrame* frame = WasmFrame::cast(it.frame());  RUNTIME_FUNCTION() local
  [all...] |
/third_party/skia/third_party/externals/libwebp/src/mux/ |
H A D | muxedit.c |
  96   // Create data for frame given image data, offsets and duration.
  99   WebPData* const frame) {  in CreateFrameData()
  120  frame->bytes = frame_bytes;  in CreateFrameData()
  121  frame->size = frame_size;  in CreateFrameData()
  283  return WEBP_MUX_INVALID_ARGUMENT;  // Conflicting frame types.  in WebPMuxPushFrame()
  293  WebPData frame;  in WebPMuxPushFrame() local
  305  err = CreateFrameData(wpi.width_, wpi.height_, &tmp, &frame);  in WebPMuxPushFrame()
  307  // Add frame chunk (with copy_data = 1).  in WebPMuxPushFrame()
  308  err = AddDataToChunkList(&frame, 1, tag, &wpi.header_);  in WebPMuxPushFrame()
  309  WebPDataClear(&frame);  // fram  in WebPMuxPushFrame()
  97   CreateFrameData( int width, int height, const WebPMuxFrameInfo* const info, WebPData* const frame)  CreateFrameData() argument
  555  WebPMuxImage* frame = NULL;  MuxCleanup() local
  [all...] |
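CreateFrameData() and AddDataToChunkList() are muxedit.c internals; callers reach them through WebPMuxPushFrame(). A sketch of that public call for an already-encoded WebP bitstream; the offsets, duration and blend/dispose choices below are illustrative assumptions, not taken from the source:

    #include <stdint.h>
    #include <string.h>
    #include <webp/mux.h>

    /* Append one animation frame (ANMF chunk) to an existing mux object. */
    static WebPMuxError push_animation_frame(WebPMux *mux,
                                             const uint8_t *webp_bytes,
                                             size_t webp_size)
    {
        WebPMuxFrameInfo info;
        memset(&info, 0, sizeof(info));
        info.bitstream.bytes = webp_bytes;
        info.bitstream.size  = webp_size;
        info.id              = WEBP_CHUNK_ANMF;      /* animation frame */
        info.x_offset        = 0;
        info.y_offset        = 0;
        info.duration        = 40;                   /* ms per frame */
        info.dispose_method  = WEBP_MUX_DISPOSE_NONE;
        info.blend_method    = WEBP_MUX_BLEND;
        return WebPMuxPushFrame(mux, &info, 1 /* copy_data */);
    }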
/third_party/curl/lib/ |
H A D | cf-h2-proxy.c |
  297  const nghttp2_frame *frame,
  301  const nghttp2_frame *frame,
  308  const nghttp2_frame *frame,
  568  static int proxy_h2_fr_print(const nghttp2_frame *frame,  in proxy_h2_fr_print() argument
  571  switch(frame->hd.type) {  in proxy_h2_fr_print()
  575  (int)frame->hd.length,  in proxy_h2_fr_print()
  576  !!(frame->hd.flags & NGHTTP2_FLAG_END_STREAM),  in proxy_h2_fr_print()
  577  (int)frame->data.padlen);  in proxy_h2_fr_print()
  582  (int)frame->hd.length,  in proxy_h2_fr_print()
  583  !!(frame  in proxy_h2_fr_print()
  640  proxy_h2_on_frame_send(nghttp2_session *session, const nghttp2_frame *frame, void *userp)  proxy_h2_on_frame_send() argument
  660  proxy_h2_on_frame_recv(nghttp2_session *session, const nghttp2_frame *frame, void *userp)  proxy_h2_on_frame_recv() argument
  736  proxy_h2_on_header(nghttp2_session *session, const nghttp2_frame *frame, const uint8_t *name, size_t namelen, const uint8_t *value, size_t valuelen, uint8_t flags, void *userp)  proxy_h2_on_header() argument
  [all...] |
/third_party/elfutils/tests/ |
H A D | dwarfcfi.c |
  82   Dwarf_Frame *frame;  in handle_address() local
  83   int result = dwarf_cfi_addrframe (cfi, pc, &frame);  in handle_address()
  93   int ra_regno = dwarf_frame_info (frame, &start, &end, &signalp);  in handle_address()
  103  ra_regno, signalp ? " (signal frame)" : "");  in handle_address()
  110  result = dwarf_frame_cfa (frame, &cfa_ops, &cfa_nops);  in handle_address()
  122  int reg_result = dwarf_frame_register (frame, r, ops_mem, &ops, &nops);  in handle_address()
  127  free (frame);  in handle_address()
|
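dwarfcfi.c exercises the libdw CFI API end to end: look up the frame description for a PC, ask which register holds the return address, and fetch the DWARF expressions for the CFA and individual registers. A condensed sketch of that sequence, assuming a Dwarf_CFI handle obtained elsewhere (e.g. via dwarf_getcfi()); the printed fields are illustrative:

    #include <stdbool.h>
    #include <stdio.h>
    #include <stdlib.h>
    #include <elfutils/libdw.h>

    static void describe_frame(Dwarf_CFI *cfi, Dwarf_Addr pc)
    {
        Dwarf_Frame *frame;
        if (dwarf_cfi_addrframe(cfi, pc, &frame) != 0) {
            fprintf(stderr, "no CFI for %#llx: %s\n",
                    (unsigned long long)pc, dwarf_errmsg(-1));
            return;
        }

        Dwarf_Addr start, end;
        bool signalp;
        int ra_regno = dwarf_frame_info(frame, &start, &end, &signalp);
        printf("FDE [%#llx, %#llx), return address in reg %d%s\n",
               (unsigned long long)start, (unsigned long long)end,
               ra_regno, signalp ? " (signal frame)" : "");

        Dwarf_Op *cfa_ops;
        size_t cfa_nops;
        if (dwarf_frame_cfa(frame, &cfa_ops, &cfa_nops) == 0)
            printf("CFA described by %zu DWARF op(s)\n", cfa_nops);

        free(frame);   /* dwarf_cfi_addrframe returns a malloc'd frame */
    }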
/third_party/ffmpeg/libavfilter/ |
H A D | af_apad.c |
  75   static int filter_frame(AVFilterLink *inlink, AVFrame *frame)  in filter_frame() argument
  81   s->whole_len_left = FFMAX(s->whole_len_left - frame->nb_samples, 0);  in filter_frame()
  83   "n_out:%d whole_len_left:%"PRId64"\n", frame->nb_samples, s->whole_len_left);  in filter_frame()
  86   s->next_pts = frame->pts + av_rescale_q(frame->nb_samples, (AVRational){1, inlink->sample_rate}, inlink->time_base);  in filter_frame()
  87   return ff_filter_frame(ctx->outputs[0], frame);  in filter_frame()
|
H A D | vf_dejudder.c |
  35   * - In order to avoid calculating this sum ever frame, a running tally
  36   * is maintained in ctx->new_pts. Each frame the new term at the start
  119  static int filter_frame(AVFilterLink *inlink, AVFrame *frame)  in filter_frame() argument
  126  int64_t next_pts = frame->pts;  in filter_frame()
  130  return ff_filter_frame(outlink, frame);  in filter_frame()
  151  frame->pts = s->new_pts;  in filter_frame()
  155  av_log(ctx, AV_LOG_DEBUG, "next=%"PRId64", new=%"PRId64"\n", next_pts, frame->pts);  in filter_frame()
  157  return ff_filter_frame(outlink, frame);  in filter_frame()
|
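Both filters do the same pts bookkeeping: the timestamp that should follow the current frame is its pts plus its duration, converted into the link's time base. The conversion is the av_rescale_q() call on line 86 of af_apad.c; a standalone sketch of just that step (function name is illustrative):

    #include <libavutil/avutil.h>
    #include <libavutil/frame.h>
    #include <libavutil/mathematics.h>

    /* Timestamp of the first sample after `frame`, expressed in time_base. */
    static int64_t next_pts_after(const AVFrame *frame, int sample_rate,
                                  AVRational time_base)
    {
        if (frame->pts == AV_NOPTS_VALUE)
            return AV_NOPTS_VALUE;
        return frame->pts + av_rescale_q(frame->nb_samples,
                                         (AVRational){1, sample_rate},
                                         time_base);
    }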
/kernel/linux/linux-5.10/drivers/net/ |
H A D | veth.c |
  462  struct xdp_frame *frame = frames[i];  in veth_xdp_xmit() local
  463  void *ptr = veth_xdp_to_ptr(frame);  in veth_xdp_xmit()
  465  if (unlikely(frame->len > max_len ||  in veth_xdp_xmit()
  467  xdp_return_frame_rx_napi(frame);  in veth_xdp_xmit()
  552  struct xdp_frame *frame = xdp_convert_buff_to_frame(xdp);  in veth_xdp_tx() local
  554  if (unlikely(!frame))  in veth_xdp_tx()
  560  bq->q[bq->count++] = frame;  in veth_xdp_tx()
  566  struct xdp_frame *frame,  in veth_xdp_rcv_one()
  570  void *hard_start = frame->data - frame  in veth_xdp_rcv_one()
  565  veth_xdp_rcv_one(struct veth_rq *rq, struct xdp_frame *frame, struct veth_xdp_tx_bq *bq, struct veth_stats *stats)  veth_xdp_rcv_one() argument
  813  struct xdp_frame *frame = veth_ptr_to_xdp(ptr);  veth_xdp_rcv() local
  [all...] |
/kernel/linux/linux-6.6/drivers/net/thunderbolt/ |
H A D | main.c |
  52   * struct thunderbolt_ip_frame_header - Header for each Thunderbolt frame
  53   * @frame_size: size of the data with the frame
  55   * @frame_id: ID of the frame to match frames to specific packet
  58   * Each data frame passed to the high-speed DMA ring has this header. If
  138  struct ring_frame frame;  member
  172  * @rx_hdr: Copy of the currently processed Rx frame. Used when a
  331  return tf->frame.size ? : TBNET_FRAME_SIZE;  in tbnet_frame_size()
  358  trace_tbnet_free_frame(i, tf->page, tf->frame.buffer_phy, dir);  in tbnet_free_buffers()
  360  if (tf->frame.buffer_phy)  in tbnet_free_buffers()
  361  dma_unmap_page(dma_dev, tf->frame  in tbnet_free_buffers()
  569  tbnet_tx_callback(struct tb_ring *ring, struct ring_frame *frame, bool canceled)  tbnet_tx_callback() argument
  813  struct ring_frame *frame;  tbnet_poll() local
  [all...] |
/third_party/node/deps/v8/src/builtins/ |
H A D | accessors.cc |
  407  Handle<JSObject> ArgumentsForInlinedFunction(JavaScriptFrame* frame,  in ArgumentsForInlinedFunction() argument
  409  Isolate* isolate = frame->isolate();  in ArgumentsForInlinedFunction()
  412  TranslatedState translated_values(frame);  in ArgumentsForInlinedFunction()
  413  translated_values.Prepare(frame->fp());  in ArgumentsForInlinedFunction()
  434  // If we materialize any object, we should deoptimize the frame because we  in ArgumentsForInlinedFunction()
  444  translated_values.StoreMaterializedValuesAndDeopt(frame);  in ArgumentsForInlinedFunction()
  451  int FindFunctionInFrame(JavaScriptFrame* frame, Handle<JSFunction> function) {  in FindFunctionInFrame() argument
  453  frame->Summarize(&frames);  in FindFunctionInFrame()
  465  JavaScriptFrame* frame = it->frame();  in GetFrameArguments() local
  503  FunctionGetArguments(JavaScriptFrame* frame, int inlined_jsframe_index)  FunctionGetArguments() argument
  527  JavaScriptFrame* frame = it.frame();  FunctionArgumentsGetter() local
  [all...] |
/kernel/linux/linux-6.6/drivers/media/platform/amphion/ |
H A D | venc.c |
  751   * redundant data at the beginning of frame.
  755   static int venc_precheck_encoded_frame(struct vpu_inst *inst, struct venc_frame_t *frame)  in venc_precheck_encoded_frame() argument
  760   if (!frame || !frame->bytesused)  in venc_precheck_encoded_frame()
  766   frame->info.wptr - inst->stream_buffer.phys,  in venc_precheck_encoded_frame()
  767   frame->bytesused);  in venc_precheck_encoded_frame()
  769   frame->bytesused -= skipped;  in venc_precheck_encoded_frame()
  770   frame->info.wptr = vpu_helper_step_walk(&inst->stream_buffer,  in venc_precheck_encoded_frame()
  771   frame->info.wptr, skipped);  in venc_precheck_encoded_frame()
  780   struct venc_frame_t *frame,  in venc_get_one_encoded_frame()
  779   venc_get_one_encoded_frame(struct vpu_inst *inst, struct venc_frame_t *frame, struct vb2_v4l2_buffer *vbuf)  venc_get_one_encoded_frame() argument
  835   struct venc_frame_t *frame;  venc_get_encoded_frames() local
  855   struct venc_frame_t *frame;  venc_frame_encoded() local
  1053  struct venc_frame_t *frame;  venc_cleanup_frames() local
  1126  struct venc_frame_t *frame = NULL;  venc_process_capture() local
  [all...] |
/kernel/linux/linux-5.10/drivers/gpu/drm/bridge/synopsys/ |
H A D | dw-hdmi.c |
  632   * bits in the IEC60958 frame. For the moment this configuration is only
  1646  struct hdmi_avi_infoframe frame;  in hdmi_config_AVI() local
  1649  /* Initialise info frame from DRM mode */  in hdmi_config_AVI()
  1650  drm_hdmi_avi_infoframe_from_display_mode(&frame, connector, mode);  in hdmi_config_AVI()
  1653  drm_hdmi_avi_infoframe_quant_range(&frame, connector, mode,  in hdmi_config_AVI()
  1658  frame.quantization_range = HDMI_QUANTIZATION_RANGE_DEFAULT;  in hdmi_config_AVI()
  1659  frame.ycc_quantization_range =  in hdmi_config_AVI()
  1664  frame.colorspace = HDMI_COLORSPACE_YUV444;  in hdmi_config_AVI()
  1666  frame.colorspace = HDMI_COLORSPACE_YUV422;  in hdmi_config_AVI()
  1668  frame  in hdmi_config_AVI()
  1773  struct hdmi_vendor_infoframe frame;  hdmi_config_vendor_specific_infoframe() local
  1827  struct hdmi_drm_infoframe frame;  hdmi_config_drm_infoframe() local
  [all...] |
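hdmi_config_AVI() fills a struct hdmi_avi_infoframe from the current DRM mode and then writes the fields into the controller's registers directly. Drivers without such register-level packing usually pair the same helper with hdmi_avi_infoframe_pack(); a kernel-context sketch (not standalone, buffer handling and function name illustrative):

    #include <linux/hdmi.h>
    #include <drm/drm_edid.h>

    /* Build an AVI infoframe for `mode` and pack it (header + payload +
     * checksum) into buf, returning the packed length or a negative errno. */
    static ssize_t pack_avi_infoframe(struct drm_connector *connector,
                                      const struct drm_display_mode *mode,
                                      u8 *buf, size_t len)
    {
        struct hdmi_avi_infoframe frame;
        int ret;

        ret = drm_hdmi_avi_infoframe_from_display_mode(&frame, connector, mode);
        if (ret < 0)
            return ret;
        return hdmi_avi_infoframe_pack(&frame, buf, len);
    }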
/kernel/linux/linux-5.10/drivers/usb/host/ |
H A D | fhci-q.c |
  53   void fhci_add_td_to_frame(struct fhci_time_frame *frame, struct td *td)  in fhci_add_td_to_frame() argument
  55   list_add_tail(&td->frame_lh, &frame->tds_list);  in fhci_add_td_to_frame()
  82   struct td *fhci_remove_td_from_frame(struct fhci_time_frame *frame)  in fhci_remove_td_from_frame() argument
  86   if (!list_empty(&frame->tds_list)) {  in fhci_remove_td_from_frame()
  87   td = list_entry(frame->tds_list.next, struct td, frame_lh);  in fhci_remove_td_from_frame()
  88   list_del_init(frame->tds_list.next);  in fhci_remove_td_from_frame()
  95   struct td *fhci_peek_td_from_frame(struct fhci_time_frame *frame)  in fhci_peek_td_from_frame() argument
  99   if (!list_empty(&frame->tds_list))  in fhci_peek_td_from_frame()
  100  td = list_entry(frame->tds_list.next, struct td, frame_lh);  in fhci_peek_td_from_frame()
|
/kernel/linux/linux-6.6/drivers/usb/host/ |
H A D | fhci-q.c |
  53   void fhci_add_td_to_frame(struct fhci_time_frame *frame, struct td *td)  in fhci_add_td_to_frame() argument
  55   list_add_tail(&td->frame_lh, &frame->tds_list);  in fhci_add_td_to_frame()
  82   struct td *fhci_remove_td_from_frame(struct fhci_time_frame *frame)  in fhci_remove_td_from_frame() argument
  86   if (!list_empty(&frame->tds_list)) {  in fhci_remove_td_from_frame()
  87   td = list_entry(frame->tds_list.next, struct td, frame_lh);  in fhci_remove_td_from_frame()
  88   list_del_init(frame->tds_list.next);  in fhci_remove_td_from_frame()
  95   struct td *fhci_peek_td_from_frame(struct fhci_time_frame *frame)  in fhci_peek_td_from_frame() argument
  99   if (!list_empty(&frame->tds_list))  in fhci_peek_td_from_frame()
  100  td = list_entry(frame->tds_list.next, struct td, frame_lh);  in fhci_peek_td_from_frame()
|
/third_party/ffmpeg/libavutil/ |
H A D | downmix_info.c |
  22   #include "frame.h"
  24   AVDownmixInfo *av_downmix_info_update_side_data(AVFrame *frame)  in av_downmix_info_update_side_data() argument
  28   side_data = av_frame_get_side_data(frame, AV_FRAME_DATA_DOWNMIX_INFO);  in av_downmix_info_update_side_data()
  31   side_data = av_frame_new_side_data(frame, AV_FRAME_DATA_DOWNMIX_INFO,  in av_downmix_info_update_side_data()
|
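av_downmix_info_update_side_data() returns the existing AV_FRAME_DATA_DOWNMIX_INFO side data on the frame, or attaches a new entry if none is present. A usage sketch; the mix levels below are illustrative values, not defaults taken from the source:

    #include <libavutil/downmix_info.h>
    #include <libavutil/error.h>
    #include <libavutil/frame.h>

    /* Attach downmix hints to a decoded audio frame. */
    static int tag_downmix(AVFrame *frame)
    {
        AVDownmixInfo *info = av_downmix_info_update_side_data(frame);
        if (!info)
            return AVERROR(ENOMEM);
        info->preferred_downmix_type = AV_DOWNMIX_TYPE_LORO;
        info->center_mix_level       = 0.707;   /* about -3 dB */
        info->surround_mix_level     = 0.707;
        info->lfe_mix_level          = 1.0;
        return 0;
    }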