Searched refs:frame (Results 376 - 400 of 1514) sorted by relevance

/third_party/ffmpeg/libavcodec/aarch64/
neontest.c
58 wrap(avcodec_send_frame(AVCodecContext *avctx, const AVFrame *frame)) in avcodec_send_frame() argument
60 testneonclobbers(avcodec_send_frame, avctx, frame); in avcodec_send_frame()
63 wrap(avcodec_receive_frame(AVCodecContext *avctx, AVFrame *frame)) in avcodec_receive_frame() argument
65 testneonclobbers(avcodec_receive_frame, avctx, frame); in avcodec_receive_frame()
/third_party/ffmpeg/libavcodec/x86/
w64xmmtest.c
58 wrap(avcodec_send_frame(AVCodecContext *avctx, const AVFrame *frame)) in avcodec_send_frame() argument
60 testxmmclobbers(avcodec_send_frame, avctx, frame); in avcodec_send_frame()
63 wrap(avcodec_receive_frame(AVCodecContext *avctx, AVFrame *frame)) in avcodec_receive_frame() argument
65 testxmmclobbers(avcodec_receive_frame, avctx, frame); in avcodec_receive_frame()
/third_party/skia/third_party/externals/oboe/src/flowgraph/resampler/
PolyphaseResamplerMono.cpp
28 void PolyphaseResamplerMono::writeFrame(const float *frame) { in writeFrame() argument
29 // Move cursor before write so that cursor points to last written frame in read. in writeFrame()
36 const float sample = frame[0]; in writeFrame()
42 void PolyphaseResamplerMono::readFrame(float *frame) { in readFrame() argument
61 frame[0] = sum; in readFrame()
/third_party/curl/lib/
cf-h2-proxy.c
297 const nghttp2_frame *frame,
301 const nghttp2_frame *frame,
308 const nghttp2_frame *frame,
568 static int proxy_h2_fr_print(const nghttp2_frame *frame, in proxy_h2_fr_print() argument
571 switch(frame->hd.type) { in proxy_h2_fr_print()
575 (int)frame->hd.length, in proxy_h2_fr_print()
576 !!(frame->hd.flags & NGHTTP2_FLAG_END_STREAM), in proxy_h2_fr_print()
577 (int)frame->data.padlen); in proxy_h2_fr_print()
582 (int)frame->hd.length, in proxy_h2_fr_print()
583 !!(frame in proxy_h2_fr_print()
640 proxy_h2_on_frame_send(nghttp2_session *session, const nghttp2_frame *frame, void *userp) proxy_h2_on_frame_send() argument
660 proxy_h2_on_frame_recv(nghttp2_session *session, const nghttp2_frame *frame, void *userp) proxy_h2_on_frame_recv() argument
736 proxy_h2_on_header(nghttp2_session *session, const nghttp2_frame *frame, const uint8_t *name, size_t namelen, const uint8_t *value, size_t valuelen, uint8_t flags, void *userp) proxy_h2_on_header() argument
[all...]
/third_party/ffmpeg/libavfilter/
f_metadata.c
23 * filter for manipulating frame metadata
105 { "select", "select frame", 0, AV_OPT_TYPE_CONST, {.i64 = METADATA_SELECT }, 0, 0, FLAGS, "mode" }, \
305 static int filter_frame(AVFilterLink *inlink, AVFrame *frame) in filter_frame() argument
310 AVDictionary **metadata = &frame->metadata; in filter_frame()
319 return ff_filter_frame(outlink, frame); in filter_frame()
322 return ff_filter_frame(outlink, frame); in filter_frame()
331 return ff_filter_frame(outlink, frame); in filter_frame()
336 return ff_filter_frame(outlink, frame); in filter_frame()
339 s->print(ctx, "frame:%-4"PRId64" pts:%-7s pts_time:%s\n", in filter_frame()
340 inlink->frame_count_out, av_ts2str(frame in filter_frame()
[all...]
qp_table.c
21 #include "libavutil/frame.h"
27 int ff_qp_table_extract(AVFrame *frame, int8_t **table, int *table_w, int *table_h, in ff_qp_table_extract() argument
32 unsigned int mb_h = (frame->height + 15) / 16; in ff_qp_table_extract()
33 unsigned int mb_w = (frame->width + 15) / 16; in ff_qp_table_extract()
39 sd = av_frame_get_side_data(frame, AV_FRAME_DATA_VIDEO_ENC_PARAMS); in ff_qp_table_extract()
/third_party/ffmpeg/libavcodec/
vmdvideo.c
31 * codec initialization. Each encoded frame that is sent to this decoder
32 * is expected to be prepended with the appropriate 16-byte frame
184 static int vmd_decode(VmdVideoContext *s, AVFrame *frame) in vmd_decode() argument
193 unsigned char *dp; /* pointer to current frame */ in vmd_decode()
194 unsigned char *pp; /* pointer to previous frame */ in vmd_decode()
235 * frame before the decode */ in vmd_decode()
240 memcpy(frame->data[0], s->prev_frame->data[0], in vmd_decode()
241 s->avctx->height * frame->linesize[0]); in vmd_decode()
274 "Trying to unpack LZ-compressed frame with no LZ buffer\n"); in vmd_decode()
285 dp = &frame in vmd_decode()
433 vmdvideo_decode_frame(AVCodecContext *avctx, AVFrame *frame, int *got_frame, AVPacket *avpkt) vmdvideo_decode_frame() argument
[all...]
pcm.c
89 samples_ ## type = (const type *) frame->extended_data[c]; \
97 const AVFrame *frame, int *got_packet_ptr) in pcm_encode_frame()
110 n = frame->nb_samples * avctx->ch_layout.nb_channels; in pcm_encode_frame()
111 samples = (const short *)frame->data[0]; in pcm_encode_frame()
215 const uint8_t *src = frame->extended_data[c]; in pcm_encode_frame()
321 dst = frame->extended_data[c]; \
329 static int pcm_decode_frame(AVCodecContext *avctx, AVFrame *frame, in pcm_decode_frame() argument
380 frame->nb_samples = n * samples_per_block / channels; in pcm_decode_frame()
381 if ((ret = ff_get_buffer(avctx, frame, 0)) < 0) in pcm_decode_frame()
383 samples = frame in pcm_decode_frame()
96 pcm_encode_frame(AVCodecContext *avctx, AVPacket *avpkt, const AVFrame *frame, int *got_packet_ptr) pcm_encode_frame() argument
[all...]
xan.c
209 static inline void xan_wc3_output_pixel_run(XanContext *s, AVFrame *frame, in xan_wc3_output_pixel_run() argument
219 palette_plane = frame->data[0]; in xan_wc3_output_pixel_run()
220 stride = frame->linesize[0]; in xan_wc3_output_pixel_run()
239 static inline void xan_wc3_copy_pixel_run(XanContext *s, AVFrame *frame, in xan_wc3_copy_pixel_run() argument
255 palette_plane = frame->data[0]; in xan_wc3_copy_pixel_run()
259 stride = frame->linesize[0]; in xan_wc3_copy_pixel_run()
297 static int xan_wc3_decode_frame(XanContext *s, AVFrame *frame) in xan_wc3_decode_frame() argument
355 /* use the decoded data segments to build the frame */ in xan_wc3_decode_frame()
423 /* run of (size) pixels is unchanged from last frame */ in xan_wc3_decode_frame()
424 xan_wc3_copy_pixel_run(s, frame, in xan_wc3_decode_frame()
541 xan_decode_frame(AVCodecContext *avctx, AVFrame *frame, int *got_frame, AVPacket *avpkt) xan_decode_frame() argument
[all...]
pafaudio.c
45 static int paf_audio_decode(AVCodecContext *avctx, AVFrame *frame, in paf_audio_decode() argument
57 frame->nb_samples = PAF_SOUND_SAMPLES * frames; in paf_audio_decode()
58 if ((ret = ff_get_buffer(avctx, frame, 0)) < 0) in paf_audio_decode()
61 output_samples = (int16_t *)frame->data[0]; in paf_audio_decode()
frame_thread_encoder.c
84 AVFrame *frame; in worker() local
104 frame = task->indata; in worker()
107 ret = ffcodec(avctx->codec)->cb.encode(avctx, pkt, frame, &got_packet); in worker()
112 pkt->pts = pkt->dts = frame->pts; in worker()
118 av_frame_unref(frame); in worker()
157 "MJPEG CBR encoding works badly with frame multi-threading, consider " in ff_frame_thread_encoder_init()
172 // huffyuv does not support these with multiple frame threads currently in ff_frame_thread_encoder_init()
284 AVFrame *frame, int *got_packet_ptr) in ff_thread_video_encode_frame()
291 if(frame){ in ff_thread_video_encode_frame()
292 av_frame_move_ref(c->tasks[c->task_index].indata, frame); in ff_thread_video_encode_frame()
283 ff_thread_video_encode_frame(AVCodecContext *avctx, AVPacket *pkt, AVFrame *frame, int *got_packet_ptr) ff_thread_video_encode_frame() argument
[all...]
lscrdec.c
24 #include "libavutil/frame.h"
115 AVFrame *frame = s->last_picture; in decode_frame_lscr() local
129 ret = ff_reget_buffer(avctx, frame, in decode_frame_lscr()
157 frame->key_frame = (nb_blocks == 1) && in decode_frame_lscr()
185 s->image_buf = frame->data[0] + (avctx->height - y - 1) * frame->linesize[0] + x * 3; in decode_frame_lscr()
186 s->image_linesize =-frame->linesize[0]; in decode_frame_lscr()
202 frame->pict_type = frame->key_frame ? AV_PICTURE_TYPE_I : AV_PICTURE_TYPE_P; in decode_frame_lscr()
204 if ((ret = av_frame_ref(rframe, frame)) < in decode_frame_lscr()
[all...]
libilbc.c
76 av_log(avctx, AV_LOG_ERROR, "iLBC frame mode not indicated\n"); in ilbc_decode_init()
90 static int ilbc_decode_frame(AVCodecContext *avctx, AVFrame *frame, in ilbc_decode_frame() argument
100 av_log(avctx, AV_LOG_ERROR, "iLBC frame too short (%u, should be %u)\n", in ilbc_decode_frame()
102 av_log(avctx, AV_LOG_ERROR, "iLBC frame too short (%u, should be " in ilbc_decode_frame()
109 frame->nb_samples = s->decoder.blockl; in ilbc_decode_frame()
110 if ((ret = ff_get_buffer(avctx, frame, 0)) < 0) in ilbc_decode_frame()
113 WebRtcIlbcfix_DecodeImpl((int16_t *) frame->data[0], (const uint16_t *) buf, &s->decoder, 1); in ilbc_decode_frame()
182 const AVFrame *frame, int *got_packet_ptr) in ilbc_encode_frame()
190 WebRtcIlbcfix_EncodeImpl((uint16_t *) avpkt->data, (const int16_t *) frame->data[0], &s->encoder); in ilbc_encode_frame()
181 ilbc_encode_frame(AVCodecContext *avctx, AVPacket *avpkt, const AVFrame *frame, int *got_packet_ptr) ilbc_encode_frame() argument
/third_party/musl/porting/linux/user/src/gwp_asan/
gwp_asan.c
287 * We can build library with "-fno-omit-frame-pointer" to get a more accurate call stack.
313 unwind_info *frame = (unwind_info*)(current_frame_addr); in libc_gwp_asan_unwind_fast() local
315 num_frames, current_frame_addr, stack_end, frame->fp, frame->lr); in libc_gwp_asan_unwind_fast()
316 if (!frame->lr) { in libc_gwp_asan_unwind_fast()
320 frame_buf[num_frames] = strip_pac_pc(frame->lr) - 4; in libc_gwp_asan_unwind_fast()
323 if (frame->fp == prev_fp || frame->lr == prev_lr || frame->fp < current_frame_addr + sizeof(unwind_info) || in libc_gwp_asan_unwind_fast()
324 frame in libc_gwp_asan_unwind_fast()
[all...]
/third_party/musl/src/gwp_asan/linux/
gwp_asan.c
287 * We can build library with "-fno-omit-frame-pointer" to get a more accurate call stack.
313 unwind_info *frame = (unwind_info*)(current_frame_addr); in libc_gwp_asan_unwind_fast() local
315 num_frames, current_frame_addr, stack_end, frame->fp, frame->lr); in libc_gwp_asan_unwind_fast()
316 if (!frame->lr) { in libc_gwp_asan_unwind_fast()
320 frame_buf[num_frames] = strip_pac_pc(frame->lr) - 4; in libc_gwp_asan_unwind_fast()
323 if (frame->fp == prev_fp || frame->lr == prev_lr || frame->fp < current_frame_addr + sizeof(unwind_info) || in libc_gwp_asan_unwind_fast()
324 frame in libc_gwp_asan_unwind_fast()
[all...]
/third_party/nghttp2/lib/
nghttp2_http.c
351 nghttp2_frame *frame, nghttp2_hd_nv *nv, in nghttp2_http_on_header()
390 if (session->server || frame->hd.type == NGHTTP2_PUSH_PROMISE) { in nghttp2_http_on_header()
435 if (session->server || frame->hd.type == NGHTTP2_PUSH_PROMISE) { in nghttp2_http_on_header()
445 nghttp2_frame *frame) { in nghttp2_http_on_request_headers()
471 if (frame->hd.type == NGHTTP2_PUSH_PROMISE) { in nghttp2_http_on_request_headers()
509 nghttp2_frame *frame) { in nghttp2_http_on_trailer_headers()
512 if ((frame->hd.flags & NGHTTP2_FLAG_END_STREAM) == 0) { in nghttp2_http_on_trailer_headers()
545 nghttp2_frame *frame) { in nghttp2_http_record_request_method()
550 switch (frame->hd.type) { in nghttp2_http_record_request_method()
552 nva = frame in nghttp2_http_record_request_method()
350 nghttp2_http_on_header(nghttp2_session *session, nghttp2_stream *stream, nghttp2_frame *frame, nghttp2_hd_nv *nv, int trailer) nghttp2_http_on_header() argument
444 nghttp2_http_on_request_headers(nghttp2_stream *stream, nghttp2_frame *frame) nghttp2_http_on_request_headers() argument
508 nghttp2_http_on_trailer_headers(nghttp2_stream *stream, nghttp2_frame *frame) nghttp2_http_on_trailer_headers() argument
544 nghttp2_http_record_request_method(nghttp2_stream *stream, nghttp2_frame *frame) nghttp2_http_record_request_method() argument
[all...]
/third_party/node/deps/nghttp2/lib/
nghttp2_http.c
351 nghttp2_frame *frame, nghttp2_hd_nv *nv, in nghttp2_http_on_header()
390 if (session->server || frame->hd.type == NGHTTP2_PUSH_PROMISE) { in nghttp2_http_on_header()
435 if (session->server || frame->hd.type == NGHTTP2_PUSH_PROMISE) { in nghttp2_http_on_header()
445 nghttp2_frame *frame) { in nghttp2_http_on_request_headers()
471 if (frame->hd.type == NGHTTP2_PUSH_PROMISE) { in nghttp2_http_on_request_headers()
509 nghttp2_frame *frame) { in nghttp2_http_on_trailer_headers()
512 if ((frame->hd.flags & NGHTTP2_FLAG_END_STREAM) == 0) { in nghttp2_http_on_trailer_headers()
545 nghttp2_frame *frame) { in nghttp2_http_record_request_method()
550 switch (frame->hd.type) { in nghttp2_http_record_request_method()
552 nva = frame in nghttp2_http_record_request_method()
350 nghttp2_http_on_header(nghttp2_session *session, nghttp2_stream *stream, nghttp2_frame *frame, nghttp2_hd_nv *nv, int trailer) nghttp2_http_on_header() argument
444 nghttp2_http_on_request_headers(nghttp2_stream *stream, nghttp2_frame *frame) nghttp2_http_on_request_headers() argument
508 nghttp2_http_on_trailer_headers(nghttp2_stream *stream, nghttp2_frame *frame) nghttp2_http_on_trailer_headers() argument
544 nghttp2_http_record_request_method(nghttp2_stream *stream, nghttp2_frame *frame) nghttp2_http_record_request_method() argument
[all...]
/third_party/mesa3d/src/intel/common/
intel_measure.c
51 { "frame", INTEL_MEASURE_FRAME },
122 fprintf(stderr, "INTEL_MEASURE start frame may " in intel_measure_init()
135 fprintf(stderr, "INTEL_MEASURE count frame must be positive: %d\n", in intel_measure_init()
210 fputs("draw_start,draw_end,frame,batch," in intel_measure_init()
217 device->frame = 0; in intel_measure_init()
264 * - at frame boundaries
319 * Notify intel_measure that a frame is about to begin.
321 * Configuration values and the control fifo may commence measurement at frame
325 intel_measure_frame_transition(unsigned frame) in intel_measure_frame_transition() argument
327 if (frame in intel_measure_frame_transition()
[all...]
/third_party/python/Lib/
cgitb.py
11 context - number of lines of source code to show for each stack frame
69 def lookup(name, frame, locals):
73 if name in frame.f_globals:
74 return 'global', frame.f_globals[name]
75 if '__builtins__' in frame.f_globals:
76 builtins = frame.f_globals['__builtins__']
85 def scanvars(reader, frame, locals):
96 where, value = lookup(token, frame, locals)
129 for frame, file, lnum, func, lines, index in records:
135 args, varargs, varkw, locals = inspect.getargvalues(frame)
[all...]
/third_party/node/deps/v8/src/runtime/
runtime-test-wasm.cc
150 // Find the caller wasm frame. in RUNTIME_FUNCTION()
155 WasmFrame* frame = WasmFrame::cast(it.frame()); in RUNTIME_FUNCTION() local
158 int func_index = frame->function_index(); in RUNTIME_FUNCTION()
159 const wasm::WasmModule* module = frame->wasm_instance().module(); in RUNTIME_FUNCTION()
161 wasm::ModuleWireBytes(frame->native_module()->wire_bytes()); in RUNTIME_FUNCTION()
166 wasm::WasmCode* code = frame->wasm_code(); in RUNTIME_FUNCTION()
187 // Find the caller wasm frame. in RUNTIME_FUNCTION()
192 WasmFrame* frame = WasmFrame::cast(it.frame()); in RUNTIME_FUNCTION() local
416 WasmFrame* frame = WasmFrame::cast(it.frame()); RUNTIME_FUNCTION() local
[all...]
/third_party/skia/third_party/externals/libwebp/src/mux/
muxedit.c
96 // Create data for frame given image data, offsets and duration.
99 WebPData* const frame) { in CreateFrameData()
120 frame->bytes = frame_bytes; in CreateFrameData()
121 frame->size = frame_size; in CreateFrameData()
283 return WEBP_MUX_INVALID_ARGUMENT; // Conflicting frame types. in WebPMuxPushFrame()
293 WebPData frame; in WebPMuxPushFrame() local
305 err = CreateFrameData(wpi.width_, wpi.height_, &tmp, &frame); in WebPMuxPushFrame()
307 // Add frame chunk (with copy_data = 1). in WebPMuxPushFrame()
308 err = AddDataToChunkList(&frame, 1, tag, &wpi.header_); in WebPMuxPushFrame()
309 WebPDataClear(&frame); // fram in WebPMuxPushFrame()
97 CreateFrameData( int width, int height, const WebPMuxFrameInfo* const info, WebPData* const frame) CreateFrameData() argument
555 WebPMuxImage* frame = NULL; MuxCleanup() local
[all...]
/third_party/node/deps/v8/src/builtins/
accessors.cc
407 Handle<JSObject> ArgumentsForInlinedFunction(JavaScriptFrame* frame, in ArgumentsForInlinedFunction() argument
409 Isolate* isolate = frame->isolate(); in ArgumentsForInlinedFunction()
412 TranslatedState translated_values(frame); in ArgumentsForInlinedFunction()
413 translated_values.Prepare(frame->fp()); in ArgumentsForInlinedFunction()
434 // If we materialize any object, we should deoptimize the frame because we in ArgumentsForInlinedFunction()
444 translated_values.StoreMaterializedValuesAndDeopt(frame); in ArgumentsForInlinedFunction()
451 int FindFunctionInFrame(JavaScriptFrame* frame, Handle<JSFunction> function) { in FindFunctionInFrame() argument
453 frame->Summarize(&frames); in FindFunctionInFrame()
465 JavaScriptFrame* frame = it->frame(); in GetFrameArguments() local
503 FunctionGetArguments(JavaScriptFrame* frame, int inlined_jsframe_index) FunctionGetArguments() argument
527 JavaScriptFrame* frame = it.frame(); FunctionArgumentsGetter() local
[all...]
/third_party/node/test/common/
prof.js
34 return { frame: first, roots: profile.head.children };
38 const { frame, roots } = findFirstFrame(file, func);
39 if (!frame) {
44 assert.notStrictEqual(frame, undefined);
/third_party/skia/third_party/externals/angle2/samples/capture_replay/
CaptureReplay.cpp
6 // CaptureReplay: Template for replaying a frame capture with ANGLE.
61 // Compute the current frame, looping from kReplayFrameStart to kReplayFrameEnd.
62 uint32_t frame = variable
64 if (mPreviousFrame > frame)
68 ReplayContextFrame(frame); variable
69 mPreviousFrame = frame;
/third_party/elfutils/tests/
dwarfcfi.c
82 Dwarf_Frame *frame; in handle_address() local
83 int result = dwarf_cfi_addrframe (cfi, pc, &frame); in handle_address()
93 int ra_regno = dwarf_frame_info (frame, &start, &end, &signalp); in handle_address()
103 ra_regno, signalp ? " (signal frame)" : ""); in handle_address()
110 result = dwarf_frame_cfa (frame, &cfa_ops, &cfa_nops); in handle_address()
122 int reg_result = dwarf_frame_register (frame, r, ops_mem, &ops, &nops); in handle_address()
127 free (frame); in handle_address()
