
Searched refs:frame (Results 76 - 100 of 1891) sorted by relevance


/foundation/CastEngine/castengine_wifi_display/services/protocol/rtp/src/
rtp_codec_h264.cpp
20 #include "frame/h264_frame.h"
50 auto frame = rtp->GetPayload(); in InputRtp() local
54 int32_t nal = H264_TYPE(frame[0]); in InputRtp()
60 UnpackStapA(rtp, frame + 1, length - 1, stamp); in InputRtp()
63 UnpackFuA(rtp, frame, length, stamp, seq); in InputRtp()
68 UnpackSingle(rtp, frame, length, stamp); in InputRtp()
163 auto frame = std::make_shared<H264Frame>(); in ObtainFrame() local
164 frame->prefixSize_ = 4; // 4:fixed size in ObtainFrame()
165 return frame; in ObtainFrame()
168 void RtpDecoderH264::OutputFrame(const RtpPacket::Ptr &rtp, const H264Frame::Ptr &frame) in OutputFrame() argument
189 InputFrame(const Frame::Ptr &frame) InputFrame() argument
227 InputFrame(const Frame::Ptr &frame, bool isMark) InputFrame() argument
[all...]
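
Note: the rtp_codec_h264.cpp hits above dispatch on H264_TYPE(frame[0]), which follows the RFC 6184 payload types (24 = STAP-A aggregation, 28 = FU-A fragmentation, 1-23 = single NAL unit). A minimal standalone sketch of that classification, with placeholder names rather than the Cast Engine code:

#include <cstddef>
#include <cstdint>
#include <cstdio>

// RFC 6184: the NAL unit type is the low five bits of the first payload byte.
enum class H264PayloadKind { SingleNal, StapA, FuA, Other };

H264PayloadKind ClassifyPayload(const uint8_t* payload, size_t len) {
    if (payload == nullptr || len == 0) return H264PayloadKind::Other;
    const uint8_t type = payload[0] & 0x1F;
    if (type == 24) return H264PayloadKind::StapA;   // aggregation packet
    if (type == 28) return H264PayloadKind::FuA;     // fragmentation unit (needs sequence tracking)
    if (type >= 1 && type <= 23) return H264PayloadKind::SingleNal;
    return H264PayloadKind::Other;                   // reserved, STAP-B, MTAP, FU-B, ...
}

int main() {
    const uint8_t stap_a[] = { 0x18, 0x00 };         // 0x18 & 0x1F == 24
    std::printf("kind=%d\n", static_cast<int>(ClassifyPayload(stap_a, sizeof(stap_a))));
    return 0;
}
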
/third_party/ffmpeg/libavfilter/
af_asetnsamples.c
45 { "nb_out_samples", "set the number of per-frame output samples", OFFSET(nb_out_samples), AV_OPT_TYPE_INT, {.i64=1024}, 1, INT_MAX, FLAGS },
46 { "n", "set the number of per-frame output samples", OFFSET(nb_out_samples), AV_OPT_TYPE_INT, {.i64=1024}, 1, INT_MAX, FLAGS },
47 { "pad", "pad last frame with zeros", OFFSET(pad), AV_OPT_TYPE_BOOL, {.i64=1}, 0, 1, FLAGS },
48 { "p", "pad last frame with zeros", OFFSET(pad), AV_OPT_TYPE_BOOL, {.i64=1}, 0, 1, FLAGS },
59 AVFrame *frame = NULL, *pad_frame; in activate() local
64 ret = ff_inlink_consume_samples(inlink, s->nb_out_samples, s->nb_out_samples, &frame); in activate()
69 if (!s->pad || frame->nb_samples == s->nb_out_samples) in activate()
70 return ff_filter_frame(outlink, frame); in activate()
74 av_frame_free(&frame); in activate()
78 ret = av_frame_copy_props(pad_frame, frame); in activate()
[all...]
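
Note: the af_asetnsamples.c hits above show the filter's contract: emit exactly nb_out_samples samples per output frame and, when pad is set, zero-pad the final short frame. A generic standalone sketch of that chunking policy, over plain buffers rather than the FFmpeg filter API:

#include <algorithm>
#include <cstddef>
#include <cstdio>
#include <vector>

// Split an arbitrary-length sample buffer into fixed-size frames; when
// pad_last is set, the final short frame is padded with silence.
std::vector<std::vector<float>> Chunk(const std::vector<float>& in,
                                      std::size_t frame_size, bool pad_last) {
    std::vector<std::vector<float>> frames;
    for (std::size_t pos = 0; pos < in.size(); pos += frame_size) {
        const std::size_t n = std::min(frame_size, in.size() - pos);
        std::vector<float> f(in.begin() + pos, in.begin() + pos + n);
        if (n < frame_size && pad_last)
            f.resize(frame_size, 0.0f);   // pad the tail frame with zeros
        frames.push_back(std::move(f));
    }
    return frames;
}

int main() {
    std::vector<float> samples(2500, 1.0f);
    const auto frames = Chunk(samples, 1024, /*pad_last=*/true);
    std::printf("%zu frames, last has %zu samples\n",
                frames.size(), frames.back().size());
}
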
qsvvpp.c
45 QSVFrame *frame; member
231 static int map_frame_to_surface(AVFrame *frame, mfxFrameSurface1 *surface) in map_frame_to_surface() argument
233 switch (frame->format) { in map_frame_to_surface()
236 surface->Data.Y = frame->data[0]; in map_frame_to_surface()
237 surface->Data.UV = frame->data[1]; in map_frame_to_surface()
240 surface->Data.Y = frame->data[0]; in map_frame_to_surface()
241 surface->Data.U = frame->data[1]; in map_frame_to_surface()
242 surface->Data.V = frame->data[2]; in map_frame_to_surface()
245 surface->Data.Y = frame->data[0]; in map_frame_to_surface()
246 surface->Data.U = frame in map_frame_to_surface()
327 QSVFrame *frame; clear_frame_list() local
[all...]
vf_monochrome.c
83 AVFrame *frame = arg; in monochrome_slice8() local
89 const int width = frame->width; in monochrome_slice8()
90 const int height = frame->height; in monochrome_slice8()
93 const int ylinesize = frame->linesize[0]; in monochrome_slice8()
94 const int ulinesize = frame->linesize[1]; in monochrome_slice8()
95 const int vlinesize = frame->linesize[2]; in monochrome_slice8()
96 uint8_t *yptr = frame->data[0] + slice_start * ylinesize; in monochrome_slice8()
104 uint8_t *uptr = frame->data[1] + cy * ulinesize; in monochrome_slice8()
105 uint8_t *vptr = frame->data[2] + cy * vlinesize; in monochrome_slice8()
122 AVFrame *frame in monochrome_slice16() local
161 AVFrame *frame = arg; clear_slice8() local
187 AVFrame *frame = arg; clear_slice16() local
212 filter_frame(AVFilterLink *inlink, AVFrame *frame) filter_frame() argument
[all...]
vf_blackframe.c
44 unsigned int frame; ///< frame number member
46 unsigned int last_keyframe; ///< frame number of the last received key-frame
59 static int filter_frame(AVFilterLink *inlink, AVFrame *frame) in filter_frame() argument
65 uint8_t *p = frame->data[0]; in filter_frame()
69 for (i = 0; i < frame->height; i++) { in filter_frame()
72 p += frame->linesize[0]; in filter_frame()
75 if (frame->key_frame) in filter_frame()
76 s->last_keyframe = s->frame; in filter_frame()
[all...]
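
Note: vf_blackframe.c walks the luma plane row by row via linesize and flags a frame once enough pixels fall below a darkness threshold. A standalone sketch of that measurement, with illustrative threshold values rather than the filter's code:

#include <cstddef>
#include <cstdint>
#include <cstdio>
#include <vector>

// Fraction of luma pixels darker than `thresh`, walking rows via `linesize`
// (which may be wider than `width` due to padding).
double DarkFraction(const uint8_t* luma, int width, int height,
                    int linesize, uint8_t thresh) {
    long dark = 0;
    for (int y = 0; y < height; ++y) {
        const uint8_t* row = luma + static_cast<ptrdiff_t>(y) * linesize;
        for (int x = 0; x < width; ++x)
            dark += (row[x] < thresh);
    }
    return static_cast<double>(dark) / (static_cast<double>(width) * height);
}

int main() {
    const int w = 64, h = 48, stride = 80;                            // padded rows
    std::vector<uint8_t> plane(static_cast<size_t>(stride) * h, 10);  // mostly dark
    const bool black = DarkFraction(plane.data(), w, h, stride, 32) >= 0.98;
    std::printf("black frame: %s\n", black ? "yes" : "no");
}
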
vf_fieldorder.c
70 static int filter_frame(AVFilterLink *inlink, AVFrame *frame) in filter_frame() argument
79 if (!frame->interlaced_frame || in filter_frame()
80 frame->top_field_first == s->dst_tff) { in filter_frame()
83 frame->interlaced_frame ? in filter_frame()
84 "frame with same field order" : "progressive frame"); in filter_frame()
85 return ff_filter_frame(outlink, frame); in filter_frame()
88 if (av_frame_is_writable(frame)) { in filter_frame()
89 out = frame; in filter_frame()
93 av_frame_free(&frame); in filter_frame()
[all...]
vf_scdet.c
107 static double get_scene_score(AVFilterContext *ctx, AVFrame *frame) in get_scene_score() argument
113 if (prev_picref && frame->height == prev_picref->height in get_scene_score()
114 && frame->width == prev_picref->width) { in get_scene_score()
122 frame->data[plane], frame->linesize[plane], in get_scene_score()
135 s->prev_picref = av_frame_clone(frame); in get_scene_score()
139 static int set_meta(SCDetContext *s, AVFrame *frame, const char *key, const char *value) in set_meta() argument
141 return av_dict_set(&frame->metadata, key, value, 0); in set_meta()
150 AVFrame *frame; in activate() local
154 ret = ff_inlink_consume_frame(inlink, &frame); in activate()
[all...]
vsrc_sierpinski.c
60 {"size", "set frame size", OFFSET(w), AV_OPT_TYPE_IMAGE_SIZE, {.str="640x480"}, 0, 0, FLAGS },
61 {"s", "set frame size", OFFSET(w), AV_OPT_TYPE_IMAGE_SIZE, {.str="640x480"}, 0, 0, FLAGS },
62 {"rate", "set frame rate", OFFSET(frame_rate), AV_OPT_TYPE_VIDEO_RATE, {.str="25"}, 0, INT_MAX, FLAGS },
63 {"r", "set frame rate", OFFSET(frame_rate), AV_OPT_TYPE_VIDEO_RATE, {.str="25"}, 0, INT_MAX, FLAGS },
93 AVFrame *frame = arg; in draw_triangle_slice() local
94 const int width = frame->width; in draw_triangle_slice()
95 const int height = frame->height; in draw_triangle_slice()
98 uint8_t *dst = frame->data[0] + start * frame->linesize[0]; in draw_triangle_slice()
109 dst += frame in draw_triangle_slice()
118 AVFrame *frame = arg; draw_carpet_slice() local
162 draw_sierpinski(AVFilterContext *ctx, AVFrame *frame) draw_sierpinski() argument
192 AVFrame *frame = ff_get_video_buffer(link, s->w, s->h); sierpinski_request_frame() local
[all...]
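
Note: vsrc_sierpinski.c renders the fractal slice by slice into frame->data[0]. The per-pixel test behind the carpet variant is compact: a pixel is a hole when some base-3 digit pair of its coordinates is (1, 1). An illustrative standalone sketch of that test, not the FFmpeg source:

#include <cstdio>

// Sierpinski carpet membership: (x, y) is a hole if any base-3 digit pair
// of the coordinates is (1, 1); otherwise the pixel is filled.
bool InCarpet(unsigned x, unsigned y) {
    while (x > 0 || y > 0) {
        if (x % 3 == 1 && y % 3 == 1)
            return false;
        x /= 3;
        y /= 3;
    }
    return true;
}

int main() {
    for (unsigned y = 0; y < 27; ++y) {
        for (unsigned x = 0; x < 27; ++x)
            std::putchar(InCarpet(x, y) ? '#' : ' ');
        std::putchar('\n');
    }
}
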
/base/hiviewdfx/faultloggerd/tools/process_dump/
dfx_stack_info_formatter.cpp
38 void FillJsFrame(const DfxFrame& frame, Json::Value& jsonInfo) in FillJsFrame() argument
41 frameJson["file"] = frame.mapName; in FillJsFrame()
42 frameJson["symbol"] = frame.funcName; in FillJsFrame()
43 frameJson["line"] = frame.line; in FillJsFrame()
44 frameJson["column"] = frame.column; in FillJsFrame()
146 for (const auto& frame : threadFrames) { in FillFrames()
147 if (frame.isJsFrame) { in FillFrames()
148 FillJsFrame(frame, jsonInfo); in FillFrames()
151 FillNativeFrame(frame, jsonInfo); in FillFrames()
153 if (Printer::IsLastValidFrame(frame)) { in FillFrames()
161 FillNativeFrame(const DfxFrame& frame, Json::Value& jsonInfo) const FillNativeFrame() argument
[all...]
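
Note: the dfx_stack_info_formatter.cpp hits serialize each stack frame into a Json::Value with file/symbol/line/column keys. A small sketch of that shape, assuming the jsoncpp library seen in the snippet; the frame struct below is a stand-in, not the DfxFrame type:

#include <json/json.h>
#include <iostream>
#include <string>

struct FakeFrame {                  // stand-in for the dump tool's frame record
    std::string mapName = "app.hap";
    std::string funcName = "onClick";
    int line = 42;
    int column = 7;
};

// Build a frame object with the same field names as in the snippet above.
Json::Value ToJson(const FakeFrame& frame) {
    Json::Value frameJson;
    frameJson["file"] = frame.mapName;
    frameJson["symbol"] = frame.funcName;
    frameJson["line"] = frame.line;
    frameJson["column"] = frame.column;
    return frameJson;
}

int main() {
    std::cout << ToJson(FakeFrame{}).toStyledString() << std::endl;
}
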
/third_party/node/test/fixtures/wpt/FileAPI/url/
url-reload.window.js
8 const frame = document.createElement('iframe');
9 frame.setAttribute('src', url);
10 frame.setAttribute('style', 'display:none;');
11 document.body.appendChild(frame);
13 frame.onload = t.step_func(() => {
16 assert_equals(frame.contentWindow.test_result, run_result);
17 frame.contentWindow.test_result = null;
18 frame.onload = t.step_func_done(() => {
19 assert_equals(frame.contentWindow.test_result, run_result);
24 frame
[all...]
url-in-tags-revoke.window.js
9 const frame = document.createElement('iframe');
10 frame.setAttribute('src', url);
11 frame.setAttribute('style', 'display:none;');
12 document.body.appendChild(frame);
15 frame.onload = t.step_func_done(() => {
16 assert_equals(frame.contentWindow.test_result, run_result);
27 const frame = document.createElement('iframe');
28 frame.setAttribute('src', '/common/blank.html');
29 frame.setAttribute('style', 'display:none;');
30 document.body.appendChild(frame);
[all...]
/third_party/ffmpeg/libavformat/
vapoursynth.c
302 AVFrame *frame = (AVFrame *)data; in free_frame() local
304 av_frame_free(&frame); in free_frame()
319 const VSFrameRef *frame; member
327 if (d->frame) in free_vsframe_ref()
328 d->vsapi->freeFrame(d->frame); in free_vsframe_ref()
339 AVFrame *frame = NULL; in read_packet_vs() local
369 av_log(s, AV_LOG_ERROR, "Error getting frame: %s\n", vserr); in read_packet_vs()
375 ref_data->frame = vsframe; in read_packet_vs()
385 frame = av_frame_alloc(); in read_packet_vs()
386 if (!frame) { in read_packet_vs()
[all...]
/third_party/ffmpeg/libavcodec/
libdavs2.c
34 AVFrame *frame; member
38 davs2_picture_t out_frame; // output data, frame data
65 davs2_seq_info_t *headerset, int ret_type, AVFrame *frame) in davs2_dump_frames()
98 frame->pict_type = AV_PICTURE_TYPE_I; in davs2_dump_frames()
102 frame->pict_type = AV_PICTURE_TYPE_P; in davs2_dump_frames()
105 frame->pict_type = AV_PICTURE_TYPE_B; in davs2_dump_frames()
108 frame->pict_type = AV_PICTURE_TYPE_S; in davs2_dump_frames()
111 av_log(avctx, AV_LOG_ERROR, "Decoder error: unknown frame type\n"); in davs2_dump_frames()
117 frame->buf[plane] = av_buffer_alloc(size_line * pic->lines[plane]); in davs2_dump_frames()
119 if (!frame in davs2_dump_frames()
64 davs2_dump_frames(AVCodecContext *avctx, davs2_picture_t *pic, int *got_frame, davs2_seq_info_t *headerset, int ret_type, AVFrame *frame) davs2_dump_frames() argument
157 send_delayed_frame(AVCodecContext *avctx, AVFrame *frame, int *got_frame) send_delayed_frame() argument
188 davs2_decode_frame(AVCodecContext *avctx, AVFrame *frame, int *got_frame, AVPacket *avpkt) davs2_decode_frame() argument
[all...]
interplayvideo.c
32 * An Interplay video frame consists of 2 parts: The decoding map and
107 static int ipvideo_decode_block_opcode_0x0(IpvideoContext *s, AVFrame *frame) in ipvideo_decode_block_opcode_0x0() argument
109 return copy_from(s, s->last_frame, frame, 0, 0); in ipvideo_decode_block_opcode_0x0()
112 static int ipvideo_decode_block_opcode_0x1(IpvideoContext *s, AVFrame *frame) in ipvideo_decode_block_opcode_0x1() argument
114 return copy_from(s, s->second_last_frame, frame, 0, 0); in ipvideo_decode_block_opcode_0x1()
117 static int ipvideo_decode_block_opcode_0x2(IpvideoContext *s, AVFrame *frame) in ipvideo_decode_block_opcode_0x2() argument
138 return copy_from(s, s->second_last_frame, frame, x, y); in ipvideo_decode_block_opcode_0x2()
141 static int ipvideo_decode_block_opcode_0x3(IpvideoContext *s, AVFrame *frame) in ipvideo_decode_block_opcode_0x3() argument
146 /* copy 8x8 block from current frame from an up/left block */ in ipvideo_decode_block_opcode_0x3()
164 return copy_from(s, frame, fram in ipvideo_decode_block_opcode_0x3()
167 ipvideo_decode_block_opcode_0x4(IpvideoContext *s, AVFrame *frame) ipvideo_decode_block_opcode_0x4() argument
188 ipvideo_decode_block_opcode_0x5(IpvideoContext *s, AVFrame *frame) ipvideo_decode_block_opcode_0x5() argument
201 ipvideo_decode_block_opcode_0x6(IpvideoContext *s, AVFrame *frame) ipvideo_decode_block_opcode_0x6() argument
210 ipvideo_decode_block_opcode_0x7(IpvideoContext *s, AVFrame *frame) ipvideo_decode_block_opcode_0x7() argument
254 ipvideo_decode_block_opcode_0x8(IpvideoContext *s, AVFrame *frame) ipvideo_decode_block_opcode_0x8() argument
332 ipvideo_decode_block_opcode_0x9(IpvideoContext *s, AVFrame *frame) ipvideo_decode_block_opcode_0x9() argument
402 ipvideo_decode_block_opcode_0xA(IpvideoContext *s, AVFrame *frame) ipvideo_decode_block_opcode_0xA() argument
468 ipvideo_decode_block_opcode_0xB(IpvideoContext *s, AVFrame *frame) ipvideo_decode_block_opcode_0xB() argument
482 ipvideo_decode_block_opcode_0xC(IpvideoContext *s, AVFrame *frame) ipvideo_decode_block_opcode_0xC() argument
501 ipvideo_decode_block_opcode_0xD(IpvideoContext *s, AVFrame *frame) ipvideo_decode_block_opcode_0xD() argument
526 ipvideo_decode_block_opcode_0xE(IpvideoContext *s, AVFrame *frame) ipvideo_decode_block_opcode_0xE() argument
543 ipvideo_decode_block_opcode_0xF(IpvideoContext *s, AVFrame *frame) ipvideo_decode_block_opcode_0xF() argument
564 ipvideo_decode_block_opcode_0x6_16(IpvideoContext *s, AVFrame *frame) ipvideo_decode_block_opcode_0x6_16() argument
576 ipvideo_decode_block_opcode_0x7_16(IpvideoContext *s, AVFrame *frame) ipvideo_decode_block_opcode_0x7_16() argument
613 ipvideo_decode_block_opcode_0x8_16(IpvideoContext *s, AVFrame *frame) ipvideo_decode_block_opcode_0x8_16() argument
689 ipvideo_decode_block_opcode_0x9_16(IpvideoContext *s, AVFrame *frame) ipvideo_decode_block_opcode_0x9_16() argument
756 ipvideo_decode_block_opcode_0xA_16(IpvideoContext *s, AVFrame *frame) ipvideo_decode_block_opcode_0xA_16() argument
822 ipvideo_decode_block_opcode_0xB_16(IpvideoContext *s, AVFrame *frame) ipvideo_decode_block_opcode_0xB_16() argument
838 ipvideo_decode_block_opcode_0xC_16(IpvideoContext *s, AVFrame *frame) ipvideo_decode_block_opcode_0xC_16() argument
858 ipvideo_decode_block_opcode_0xD_16(IpvideoContext *s, AVFrame *frame) ipvideo_decode_block_opcode_0xD_16() argument
879 ipvideo_decode_block_opcode_0xE_16(IpvideoContext *s, AVFrame *frame) ipvideo_decode_block_opcode_0xE_16() argument
920 ipvideo_format_06_firstpass(IpvideoContext *s, AVFrame *frame, int16_t opcode) ipvideo_format_06_firstpass() argument
936 ipvideo_format_06_secondpass(IpvideoContext *s, AVFrame *frame, int16_t opcode) ipvideo_format_06_secondpass() argument
955 ipvideo_decode_format_06_opcodes(IpvideoContext *s, AVFrame *frame) ipvideo_decode_format_06_opcodes() argument
994 ipvideo_format_10_firstpass(IpvideoContext *s, AVFrame *frame, int16_t opcode) ipvideo_format_10_firstpass() argument
1006 ipvideo_format_10_secondpass(IpvideoContext *s, AVFrame *frame, int16_t opcode) ipvideo_format_10_secondpass() argument
1025 ipvideo_decode_format_10_opcodes(IpvideoContext *s, AVFrame *frame) ipvideo_decode_format_10_opcodes() argument
1106 ipvideo_decode_format_11_opcodes(IpvideoContext *s, AVFrame *frame) ipvideo_decode_format_11_opcodes() argument
1192 ipvideo_decode_frame(AVCodecContext *avctx, AVFrame *frame, int *got_frame, AVPacket *avpkt) ipvideo_decode_frame() argument
[all...]
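
Note: the interplayvideo.c hits are a family of ipvideo_decode_block_opcode_0x0 through 0xF handlers, one per 4-bit opcode in the decoding map, chosen per 8x8 block. A generic sketch of that dispatch-table pattern with placeholder handlers, not the Interplay decoder itself:

#include <cstdint>
#include <cstdio>

struct BlockCtx { int x, y; };                  // placeholder decoding state

using BlockFn = int (*)(BlockCtx&);

static int CopyFromPrev(BlockCtx&)  { std::puts("copy block from previous frame"); return 0; }
static int CopyFromPrev2(BlockCtx&) { std::puts("copy block from frame before previous"); return 0; }
static int FillPattern(BlockCtx&)   { std::puts("fill block from bitstream"); return 0; }

// One handler per 4-bit opcode, mirroring the opcode_0x0..0xF layout.
static const BlockFn kHandlers[16] = {
    CopyFromPrev, CopyFromPrev2, FillPattern, FillPattern,
    FillPattern,  FillPattern,   FillPattern, FillPattern,
    FillPattern,  FillPattern,   FillPattern, FillPattern,
    FillPattern,  FillPattern,   FillPattern, FillPattern,
};

int DecodeBlock(uint8_t opcode, BlockCtx& ctx) {
    return kHandlers[opcode & 0x0F](ctx);
}

int main() {
    BlockCtx ctx{0, 0};
    DecodeBlock(0x1, ctx);
    return 0;
}
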
eatgq.c
116 static void tgq_idct_put_mb(TgqContext *s, int16_t (*block)[64], AVFrame *frame, in tgq_idct_put_mb() argument
119 ptrdiff_t linesize = frame->linesize[0]; in tgq_idct_put_mb()
120 uint8_t *dest_y = frame->data[0] + (mb_y * 16 * linesize) + mb_x * 16; in tgq_idct_put_mb()
121 uint8_t *dest_cb = frame->data[1] + (mb_y * 8 * frame->linesize[1]) + mb_x * 8; in tgq_idct_put_mb()
122 uint8_t *dest_cr = frame->data[2] + (mb_y * 8 * frame->linesize[2]) + mb_x * 8; in tgq_idct_put_mb()
129 ff_ea_idct_put_c(dest_cb, frame->linesize[1], block[4]); in tgq_idct_put_mb()
130 ff_ea_idct_put_c(dest_cr, frame->linesize[2], block[5]); in tgq_idct_put_mb()
143 static void tgq_idct_put_mb_dconly(TgqContext *s, AVFrame *frame, in tgq_idct_put_mb_dconly() argument
160 tgq_decode_mb(TgqContext *s, AVFrame *frame, int mb_y, int mb_x) tgq_decode_mb() argument
212 tgq_decode_frame(AVCodecContext *avctx, AVFrame *frame, int *got_frame, AVPacket *avpkt) tgq_decode_frame() argument
[all...]
ilbcdec.c
95 ILBCFrame frame; member
132 ILBCFrame *frame = &s->frame; in unpack_frame() local
136 frame->lsf[0] = get_bits(gb, 6); in unpack_frame()
137 frame->lsf[1] = get_bits(gb, 7); in unpack_frame()
138 frame->lsf[2] = get_bits(gb, 7); in unpack_frame()
141 frame->start = get_bits(gb, 2); in unpack_frame()
142 frame->state_first = get_bits1(gb); in unpack_frame()
143 frame->ifm = get_bits(gb, 6); in unpack_frame()
144 frame in unpack_frame()
1359 ilbc_decode_frame(AVCodecContext *avctx, AVFrame *frame, int *got_frame_ptr, AVPacket *avpkt) ilbc_decode_frame() argument
[all...]
midivid.c
45 AVFrame *frame; member
48 static int decode_mvdv(MidiVidContext *s, AVCodecContext *avctx, AVFrame *frame) in decode_mvdv() argument
111 uint8_t *dsty = frame->data[0] + y * frame->linesize[0]; in decode_mvdv()
112 uint8_t *dstu = frame->data[1] + y * frame->linesize[1]; in decode_mvdv()
113 uint8_t *dstv = frame->data[2] + y * frame->linesize[2]; in decode_mvdv()
135 dsty[x +frame->linesize[0]] = vec[idx * 12 + 0]; in decode_mvdv()
136 dsty[x+1+frame in decode_mvdv()
196 AVFrame *frame = s->frame; decode_frame() local
[all...]
mediacodec_sw_buffer.c
26 #include "libavutil/frame.h"
81 AVFrame *frame) in ff_mediacodec_sw_buffer_copy_yuv420_planar()
110 if (frame->linesize[i] == stride) { in ff_mediacodec_sw_buffer_copy_yuv420_planar()
111 memcpy(frame->data[i], src, height * stride); in ff_mediacodec_sw_buffer_copy_yuv420_planar()
114 uint8_t *dst = frame->data[i]; in ff_mediacodec_sw_buffer_copy_yuv420_planar()
119 width = FFMIN(frame->linesize[i], FFALIGN(avctx->width, 2) / 2); in ff_mediacodec_sw_buffer_copy_yuv420_planar()
125 dst += frame->linesize[i]; in ff_mediacodec_sw_buffer_copy_yuv420_planar()
136 AVFrame *frame) in ff_mediacodec_sw_buffer_copy_yuv420_semi_planar()
158 if (frame->linesize[i] == s->stride) { in ff_mediacodec_sw_buffer_copy_yuv420_semi_planar()
159 memcpy(frame in ff_mediacodec_sw_buffer_copy_yuv420_semi_planar()
76 ff_mediacodec_sw_buffer_copy_yuv420_planar(AVCodecContext *avctx, MediaCodecDecContext *s, uint8_t *data, size_t size, FFAMediaCodecBufferInfo *info, AVFrame *frame) ff_mediacodec_sw_buffer_copy_yuv420_planar() argument
131 ff_mediacodec_sw_buffer_copy_yuv420_semi_planar(AVCodecContext *avctx, MediaCodecDecContext *s, uint8_t *data, size_t size, FFAMediaCodecBufferInfo *info, AVFrame *frame) ff_mediacodec_sw_buffer_copy_yuv420_semi_planar() argument
181 ff_mediacodec_sw_buffer_copy_yuv420_packed_semi_planar(AVCodecContext *avctx, MediaCodecDecContext *s, uint8_t *data, size_t size, FFAMediaCodecBufferInfo *info, AVFrame *frame) ff_mediacodec_sw_buffer_copy_yuv420_packed_semi_planar() argument
272 ff_mediacodec_sw_buffer_copy_yuv420_packed_semi_planar_64x32Tile2m8ka(AVCodecContext *avctx, MediaCodecDecContext *s, uint8_t *data, size_t size, FFAMediaCodecBufferInfo *info, AVFrame *frame) ff_mediacodec_sw_buffer_copy_yuv420_packed_semi_planar_64x32Tile2m8ka() argument
[all...]
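
Note: the mediacodec_sw_buffer.c hits copy decoder output into an AVFrame with a single memcpy when the line size equals the stride, and row by row otherwise. A standalone sketch of that stride-aware plane copy over generic buffers, not the MediaCodec wrapper:

#include <cstdint>
#include <cstdio>
#include <cstring>
#include <vector>

// Copy `height` rows of `width` bytes from a source plane with `src_stride`
// into a destination plane with `dst_stride`. Falls back to one memcpy when
// both strides equal the row width.
void CopyPlane(uint8_t* dst, int dst_stride,
               const uint8_t* src, int src_stride,
               int width, int height) {
    if (dst_stride == width && src_stride == width) {
        std::memcpy(dst, src, static_cast<size_t>(width) * height);
        return;
    }
    for (int y = 0; y < height; ++y) {
        std::memcpy(dst, src, width);
        dst += dst_stride;
        src += src_stride;
    }
}

int main() {
    const int w = 16, h = 4, src_stride = 32, dst_stride = 16;
    std::vector<uint8_t> src(src_stride * h, 0xAB), dst(dst_stride * h, 0);
    CopyPlane(dst.data(), dst_stride, src.data(), src_stride, w, h);
    std::printf("dst[0]=0x%02X\n", dst[0]);
}
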
/foundation/multimedia/audio_framework/services/audio_service/common/src/
linear_pos_time_model.cpp
53 void LinearPosTimeModel::ResetFrameStamp(uint64_t frame, int64_t nanoTime) in ResetFrameStamp() argument
55 AUDIO_INFO_LOG("Reset frame:%{public}" PRIu64" with time:%{public}" PRId64".", frame, nanoTime); in ResetFrameStamp()
56 stampFrame_ = frame; in ResetFrameStamp()
61 bool LinearPosTimeModel::IsReasonable(uint64_t frame, int64_t nanoTime) in IsReasonable() argument
63 if (frame == stampFrame_ && nanoTime == stampNanoTime_) { in IsReasonable()
68 if (frame > stampFrame_) { in IsReasonable()
69 deltaFrame = static_cast<int64_t>(frame - stampFrame_); in IsReasonable()
71 deltaFrame = -static_cast<int64_t>(stampFrame_ - frame); in IsReasonable()
83 bool LinearPosTimeModel::UpdataFrameStamp(uint64_t frame, int64_ argument
97 GetFrameStamp(uint64_t &frame, int64_t &nanoTime) GetFrameStamp() argument
[all...]
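
Note: linear_pos_time_model.cpp anchors a (frame, nanoTime) stamp and checks later positions against it; the underlying model is a straight line through that stamp with slope 1e9 / sampleRate nanoseconds per frame. A generic standalone sketch of that mapping, not the audio_framework class:

#include <cstdint>
#include <cstdio>

constexpr int64_t kNsPerSecond = 1000000000;

// Predict the presentation time of `frame` from an anchor stamp, assuming a
// constant sample rate (a straight line through the anchor point).
int64_t PredictNanoTime(uint64_t stampFrame, int64_t stampNanoTime,
                        uint32_t sampleRate, uint64_t frame) {
    const int64_t deltaFrame = (frame >= stampFrame)
        ? static_cast<int64_t>(frame - stampFrame)
        : -static_cast<int64_t>(stampFrame - frame);
    return stampNanoTime + deltaFrame * kNsPerSecond / sampleRate;
}

int main() {
    // Anchor: frame 48000 was presented at t = 1 s; rate 48 kHz.
    const int64_t t = PredictNanoTime(48000, kNsPerSecond, 48000, 72000);
    std::printf("frame 72000 -> %.3f s\n", t / static_cast<double>(kNsPerSecond));
}
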
/foundation/CastEngine/castengine_wifi_display/tests/demo/rtp/
h264_rtp_dec_demo.cpp
27 #include "frame/frame.h"
104 aacunPack->SetOnRtpUnpack([=](uint32_t ssrc, const Frame::Ptr &frame) { in main()
106 if (frame->GetTrackType() == TRACK_AUDIO) { in main()
107 printf("aac data: len: %d dts: %d", frame->Size(), frame->Dts()); in main()
108 auto data = frame->Data(); in main()
109 auto bytes = frame->Size(); in main()
117 fwrite(frame->Data(), frame in main()
[all...]
/third_party/skia/tools/debugger/
DebugLayerManager.h
26 // Animations are expected to, but may not always use a layer on more than one frame.
28 // layer as it was on any frame. Draws may be partial, meaning their commands were clipped to not
31 // Clients may ask for a rendering of a given layer by its RenderNode id and frame, and
32 // this class will return a rendering of how it looked on that frame.
41 // Store an SkPicture under a given nodeId (and under the currently set frame number)
44 void storeSkPicture(int nodeId, int frame, sk_sp<SkPicture> picture, SkIRect dirty);
47 void setCommand(int nodeId, int frame, int command);
49 void drawLayerEventTo(SkSurface*, const int nodeId, const int frame);
51 // getLayerAsImage draws the given layer as it would have looked on frame and returns an image.
55 // For example: Say we are drawing a layer at frame 1
118 int frame; // frame of animation on which this event was recorded. global() member
[all...]
/foundation/communication/bluetooth_service/services/bluetooth/service/src/gavdp/a2dp_codec/sbclib/src/
sbc_decoder.cpp
69 void Decoder::Init(const Frame& frame) in Init() argument
73 for (int i = 0; i < frame.subbands_ * BIT16_BYTE2; i++) { in Init()
120 int Decoder::Synthesize(const Frame& frame) in Synthesize() argument
122 switch (frame.subbands_) { in Synthesize()
124 for (int channel = 0; channel < frame.channels_; channel++) { in Synthesize()
125 for (int blk = 0; blk < frame.blocks_; blk++) { in Synthesize()
126 Synthesize4(frame, channel, blk); in Synthesize()
131 for (int channel = 0; channel < frame.channels_; channel++) { in Synthesize()
132 for (int blk = 0; blk < frame.blocks_; blk++) { in Synthesize()
133 Synthesize8(frame, channe in Synthesize()
154 Synthesize4(const Frame &frame, int ch, int blk) Synthesize4() argument
197 Synthesize8(const Frame &frame, int ch, int blk) Synthesize8() argument
[all...]
/third_party/nghttp2/tests/
nghttp2_frame_test.c
74 nghttp2_headers frame, oframe; in test_nghttp2_frame_pack_headers() local
97 &frame, NGHTTP2_FLAG_END_STREAM | NGHTTP2_FLAG_END_HEADERS, 1000000007, in test_nghttp2_frame_pack_headers()
99 rv = nghttp2_frame_pack_headers(&bufs, &frame, &deflater); in test_nghttp2_frame_pack_headers()
128 nghttp2_priority_spec_init(&frame.pri_spec, 1000000009, 12, 1); in test_nghttp2_frame_pack_headers()
129 frame.hd.flags |= NGHTTP2_FLAG_PRIORITY; in test_nghttp2_frame_pack_headers()
131 rv = nghttp2_frame_pack_headers(&bufs, &frame, &deflater); in test_nghttp2_frame_pack_headers()
163 nghttp2_frame_headers_free(&frame, mem); in test_nghttp2_frame_pack_headers()
170 nghttp2_headers frame; in test_nghttp2_frame_pack_headers_frame_too_large() local
196 &frame, NGHTTP2_FLAG_END_STREAM | NGHTTP2_FLAG_END_HEADERS, 1000000007, in test_nghttp2_frame_pack_headers_frame_too_large()
198 rv = nghttp2_frame_pack_headers(&bufs, &frame, in test_nghttp2_frame_pack_headers_frame_too_large()
210 nghttp2_priority frame, oframe; test_nghttp2_frame_pack_priority() local
239 nghttp2_rst_stream frame, oframe; test_nghttp2_frame_pack_rst_stream() local
275 nghttp2_settings frame, oframe; test_nghttp2_frame_pack_settings() local
312 nghttp2_push_promise frame, oframe; test_nghttp2_frame_pack_push_promise() local
360 nghttp2_ping frame, oframe; test_nghttp2_frame_pack_ping() local
381 nghttp2_goaway frame, oframe; test_nghttp2_frame_pack_goaway() local
429 nghttp2_window_update frame, oframe; test_nghttp2_frame_pack_window_update() local
449 nghttp2_extension frame, oframe; test_nghttp2_frame_pack_altsvc() local
502 nghttp2_extension frame, oframe; test_nghttp2_frame_pack_origin() local
594 nghttp2_extension frame, oframe; test_nghttp2_frame_pack_priority_update() local
[all...]
/foundation/graphic/graphic_2d/rosen/modules/render_service_base/src/platform/ohos/backend/
rs_surface_ohos_raster.cpp
47 std::unique_ptr<RSSurfaceFrameOhosRaster> frame = std::make_unique<RSSurfaceFrameOhosRaster>(width, height); in RequestFrame() local
48 frame->requestConfig_.usage = bufferUsage_; in RequestFrame()
49 frame->requestConfig_.format = pixelFormat_; in RequestFrame()
50 SurfaceError err = producer_->RequestBuffer(frame->buffer_, frame->releaseFence_, frame->requestConfig_); in RequestFrame()
56 err = frame->buffer_->Map(); in RequestFrame()
62 sptr<SyncFence> tempFence = new SyncFence(frame->releaseFence_); in RequestFrame()
67 std::unique_ptr<RSSurfaceFrame> ret(std::move(frame)); in RequestFrame()
71 void RSSurfaceOhosRaster::SetUiTimeStamp(const std::unique_ptr<RSSurfaceFrame>& frame, uint64_ argument
89 FlushFrame(std::unique_ptr<RSSurfaceFrame>& frame, uint64_t uiTimestamp) FlushFrame() argument
[all...]
/third_party/icu/icu4j/demos/src/com/ibm/icu/dev/demo/translit/
DemoApplet.java
35 Demo frame = null; field in DemoApplet
47 if (frame == null) { in init()
48 frame = new Demo(600, 200); in init()
49 frame.addWindowListener(new WindowAdapter() { in init()
51 frame = null; in init()
55 frame.setVisible(true); in init()
56 frame.toFront(); in init()
70 if (frame != null) { in stop()
71 frame.dispose(); in stop()
73 frame in stop()
[all...]
