/third_party/node/test/parallel/
  test-http-1.0-keep-alive.js
    120  const ctx = test.responses[current];
    121  console.error('< SERVER SENDING RESPONSE', ctx);
    122  res.writeHead(200, ctx.headers);
    123  ctx.chunks.slice(0, -1).forEach(function(chunk) { res.write(chunk); });
    124  res.end(ctx.chunks[ctx.chunks.length - 1]);
    133  const ctx = test.requests[current];
    134  console.error(' > CLIENT SENDING REQUEST', ctx);
    136  conn.write(ctx.data);
    140  if (!ctx ...
/third_party/mesa3d/src/intel/ds/
  intel_pps_perf.cc
     84  assert(!ctx && "Perf context should not be initialized at this point");  in open()
     86  ctx = intel_perf_new_context(ralloc_ctx);  in open()
     87  intel_perf_init_context(ctx, cfg, nullptr, nullptr, nullptr, &devinfo, 0, drm_fd);  in open()
     91  return intel_perf_open(ctx, ...  in open()
    102  if (ctx) {  in close()
    103  intel_perf_close(ctx, nullptr);  in close()
    104  ctx = nullptr;  in close()
    110  assert(ctx && "Perf context was not open");  in oa_stream_ready()
    111  return intel_perf_oa_stream_ready(ctx);  in oa_stream_ready()
    116  assert(ctx ...  in read_oa_stream()
/third_party/mesa3d/src/mesa/program/
  program.h
     54  _mesa_init_program(struct gl_context *ctx);
     57  _mesa_free_program_data(struct gl_context *ctx);
     60  _mesa_update_default_objects_program(struct gl_context *ctx);
     63  _mesa_set_program_error(struct gl_context *ctx, GLint pos, const char *string);
     70  _mesa_new_program(struct gl_context *ctx, gl_shader_stage stage, GLuint id,
     74  _mesa_delete_program(struct gl_context *ctx, struct gl_program *prog);
     77  _mesa_lookup_program(struct gl_context *ctx, GLuint id);
     80  _mesa_reference_program_(struct gl_context *ctx,
     85  _mesa_reference_program(struct gl_context *ctx,  in _mesa_reference_program()
     90  _mesa_reference_program_(ctx, pt ...  in _mesa_reference_program()
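The last two hits show the usual Mesa pattern: a cheap static inline wrapper (_mesa_reference_program()) that only calls the out-of-line _mesa_reference_program_() when the pointer actually changes. Below is a self-contained C sketch of that conditional reference-set pattern; the prog_obj/reference_prog names and the refcount field are illustrative stand-ins, not Mesa's types.

    #include <assert.h>
    #include <stdio.h>
    #include <stdlib.h>

    /* Illustrative refcounted object; Mesa's gl_program carries far more state. */
    struct prog_obj { int refcount; };

    /* Out-of-line slow path: drop the old reference, take the new one. */
    static void reference_prog_(struct prog_obj **ptr, struct prog_obj *prog)
    {
        if (*ptr && --(*ptr)->refcount == 0)
            free(*ptr);
        if (prog)
            prog->refcount++;
        *ptr = prog;
    }

    /* Inline fast path: skip all refcount work when nothing changes. */
    static inline void reference_prog(struct prog_obj **ptr, struct prog_obj *prog)
    {
        if (*ptr != prog)
            reference_prog_(ptr, prog);
    }

    int main(void)
    {
        struct prog_obj *a = calloc(1, sizeof(*a));
        struct prog_obj *slot = NULL;

        a->refcount = 1;             /* creator's reference            */
        reference_prog(&slot, a);    /* slot takes a second reference  */
        assert(a->refcount == 2);
        reference_prog(&slot, a);    /* no-op: pointer already matches */
        assert(a->refcount == 2);
        reference_prog(&slot, NULL); /* drop slot's reference          */
        printf("refcount now %d\n", a->refcount);
        free(a);
        return 0;
    }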
/third_party/mesa3d/src/mesa/vbo/
  vbo_noop.c
    105  is_vertex_position(const struct gl_context *ctx, GLuint index)  in is_vertex_position()
    110  #define ATTR_UNION(A, N, T, C, V0, V1, V2, V3) do { (void)ctx; (void)(A); } while(0)
    111  #define ERROR(err) _mesa_error(ctx, err, __func__)
    123  vbo_install_exec_vtxfmt_noop(struct gl_context *ctx)  in vbo_install_exec_vtxfmt_noop()
    130  struct _glapi_table *tab = ctx->Exec;  in vbo_install_exec_vtxfmt_noop()
    133  if (ctx->BeginEnd) {  in vbo_install_exec_vtxfmt_noop()
    134  tab = ctx->BeginEnd;  in vbo_install_exec_vtxfmt_noop()
    138  if (ctx->HWSelectModeBeginEnd) {  in vbo_install_exec_vtxfmt_noop()
    139  tab = ctx->HWSelectModeBeginEnd;  in vbo_install_exec_vtxfmt_noop()
    146  vbo_install_save_vtxfmt_noop(struct gl_context *ctx)  in vbo_install_save_vtxfmt_noop()
/third_party/mesa3d/src/gallium/drivers/etnaviv/
  etnaviv_query_sw.c
     37  etna_sw_destroy_query(struct etna_context *ctx, struct etna_query *q)  in etna_sw_destroy_query()
     45  read_counter(struct etna_context *ctx, unsigned type)  in read_counter()
     49  return ctx->stats.prims_generated;  in read_counter()
     51  return ctx->stats.draw_calls;  in read_counter()
     53  return ctx->stats.rs_operations;  in read_counter()
     60  etna_sw_begin_query(struct etna_context *ctx, struct etna_query *q)  in etna_sw_begin_query()
     64  sq->begin_value = read_counter(ctx, q->type);  in etna_sw_begin_query()
     68  etna_sw_end_query(struct etna_context *ctx, struct etna_query *q)  in etna_sw_end_query()
     72  sq->end_value = read_counter(ctx, q->type);  in etna_sw_end_query()
     76  etna_sw_get_query_result(struct etna_context *ctx, struc ...
     94  etna_sw_create_query(struct etna_context *ctx, unsigned query_type)
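The begin/end hits above show the driver snapshotting a software statistics counter when a query starts and again when it ends, so the result can be reported as the difference between the two snapshots. A minimal, self-contained C sketch of that pattern follows; the sw_query/sw_stats names are placeholders and not the etnaviv types.

    #include <stdint.h>
    #include <stdio.h>

    /* Hypothetical stand-ins for the driver's ctx->stats.* counters. */
    struct sw_stats { uint64_t draw_calls; uint64_t prims_generated; };

    struct sw_query {
        const uint64_t *counter;   /* which counter this query samples   */
        uint64_t begin_value;      /* snapshot taken at begin_query time */
        uint64_t end_value;        /* snapshot taken at end_query time   */
    };

    static void sw_begin_query(struct sw_query *q) { q->begin_value = *q->counter; }
    static void sw_end_query(struct sw_query *q)   { q->end_value   = *q->counter; }

    static uint64_t sw_query_result(const struct sw_query *q)
    {
        return q->end_value - q->begin_value;   /* counter delta over the query */
    }

    int main(void)
    {
        struct sw_stats stats = {0};
        struct sw_query q = { .counter = &stats.draw_calls };

        sw_begin_query(&q);
        stats.draw_calls += 3;        /* pretend three draws were issued */
        sw_end_query(&q);

        printf("draw calls during query: %llu\n",
               (unsigned long long)sw_query_result(&q));
        return 0;
    }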
/third_party/mesa3d/src/panfrost/lib/
  pan_blitter.h
    103  struct pan_blit_context *ctx);
    106  pan_blit_next_surface(struct pan_blit_context *ctx)  in pan_blit_next_surface()
    108  if (ctx->dst.last_layer < ctx->dst.layer_offset) {  in pan_blit_next_surface()
    109  if (ctx->dst.cur_layer <= ctx->dst.last_layer)  in pan_blit_next_surface()
    112  ctx->dst.cur_layer--;  in pan_blit_next_surface()
    114  if (ctx->dst.cur_layer >= ctx->dst.last_layer)  in pan_blit_next_surface()
    117  ctx ...  in pan_blit_next_surface()
/third_party/node/deps/openssl/openssl/providers/implementations/ciphers/
  cipher_aes_ocb_hw.c
     21  CRYPTO_ocb128_cleanup(&ctx->ocb); \
     22  fn_set_enc_key(key, keylen * 8, &ctx->ksenc.ks); \
     23  fn_set_dec_key(key, keylen * 8, &ctx->ksdec.ks); \
     24  if (!CRYPTO_ocb128_init(&ctx->ocb, &ctx->ksenc.ks, &ctx->ksdec.ks, \
     26  ctx->base.enc ? (ocb128_f)fn_stream_enc : \
     29  ctx->key_set = 1
     36  PROV_AES_OCB_CTX *ctx = (PROV_AES_OCB_CTX *)vctx;  in cipher_hw_aes_ocb_generic_initkey()
     68  PROV_AES_OCB_CTX *ctx ...  in cipher_hw_aes_ocb_aesni_initkey()
     91  PROV_AES_OCB_CTX *ctx = (PROV_AES_OCB_CTX *)vctx;  in cipher_hw_aes_ocb_t4_initkey()
/third_party/node/deps/openssl/openssl/include/openssl/
  async.h
     57  void ASYNC_WAIT_CTX_free(ASYNC_WAIT_CTX *ctx);
     58  int ASYNC_WAIT_CTX_set_wait_fd(ASYNC_WAIT_CTX *ctx, const void *key,
     63  int ASYNC_WAIT_CTX_get_fd(ASYNC_WAIT_CTX *ctx, const void *key,
     65  int ASYNC_WAIT_CTX_get_all_fds(ASYNC_WAIT_CTX *ctx, OSSL_ASYNC_FD *fd,
     67  int ASYNC_WAIT_CTX_get_callback(ASYNC_WAIT_CTX *ctx,
     70  int ASYNC_WAIT_CTX_set_callback(ASYNC_WAIT_CTX *ctx,
     73  int ASYNC_WAIT_CTX_set_status(ASYNC_WAIT_CTX *ctx, int status);
     74  int ASYNC_WAIT_CTX_get_status(ASYNC_WAIT_CTX *ctx);
     75  int ASYNC_WAIT_CTX_get_changed_fds(ASYNC_WAIT_CTX *ctx, OSSL_ASYNC_FD *addfd,
     78  int ASYNC_WAIT_CTX_clear_fd(ASYNC_WAIT_CTX *ctx, cons ...
/third_party/node/deps/openssl/openssl/crypto/evp/
  p_seal.c
     20  int EVP_SealInit(EVP_CIPHER_CTX *ctx, const EVP_CIPHER *type,  in EVP_SealInit()
     33  EVP_CIPHER_CTX_reset(ctx);  in EVP_SealInit()
     34  if (!EVP_EncryptInit_ex(ctx, type, NULL, NULL, NULL))  in EVP_SealInit()
     37  if ((cipher = EVP_CIPHER_CTX_get0_cipher(ctx)) != NULL  in EVP_SealInit()
     43  if (EVP_CIPHER_CTX_rand_key(ctx, key) <= 0)  in EVP_SealInit()
     46  len = EVP_CIPHER_CTX_get_iv_length(ctx);  in EVP_SealInit()
     50  len = EVP_CIPHER_CTX_get_key_length(ctx);  in EVP_SealInit()
     54  if (!EVP_EncryptInit_ex(ctx, NULL, NULL, key, iv))  in EVP_SealInit()
     80  int EVP_SealFinal(EVP_CIPHER_CTX *ctx, unsigned char *out, int *outl)  in EVP_SealFinal()
     83  i = EVP_EncryptFinal_ex(ctx, ou ...  in EVP_SealFinal()
/third_party/openssl/crypto/evp/
  p_seal.c
     20  int EVP_SealInit(EVP_CIPHER_CTX *ctx, const EVP_CIPHER *type,  in EVP_SealInit()
     33  EVP_CIPHER_CTX_reset(ctx);  in EVP_SealInit()
     34  if (!EVP_EncryptInit_ex(ctx, type, NULL, NULL, NULL))  in EVP_SealInit()
     37  if ((cipher = EVP_CIPHER_CTX_get0_cipher(ctx)) != NULL  in EVP_SealInit()
     43  if (EVP_CIPHER_CTX_rand_key(ctx, key) <= 0)  in EVP_SealInit()
     46  len = EVP_CIPHER_CTX_get_iv_length(ctx);  in EVP_SealInit()
     50  len = EVP_CIPHER_CTX_get_key_length(ctx);  in EVP_SealInit()
     54  if (!EVP_EncryptInit_ex(ctx, NULL, NULL, key, iv))  in EVP_SealInit()
     80  int EVP_SealFinal(EVP_CIPHER_CTX *ctx, unsigned char *out, int *outl)  in EVP_SealFinal()
     83  i = EVP_EncryptFinal_ex(ctx, ou ...  in EVP_SealFinal()
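Both copies of p_seal.c implement OpenSSL envelope encryption: EVP_SealInit() generates a random session key, wraps it for each recipient public key and writes out the IV, after which the payload goes through the normal symmetric encrypt path. Below is a hedged sketch of driving that API for a single recipient; seal_one, pubkey and the buffer handling are illustrative, and error handling is abbreviated.

    #include <openssl/evp.h>
    #include <stdlib.h>

    /* Seal `pt` for one recipient public key; caller frees *ek_out and *ct_out.
     * Returns the ciphertext length, or -1 on error.  (Sketch only.) */
    static int seal_one(EVP_PKEY *pubkey, const unsigned char *pt, int ptlen,
                        unsigned char **ek_out, int *eklen_out,
                        unsigned char iv[EVP_MAX_IV_LENGTH],
                        unsigned char **ct_out)
    {
        EVP_CIPHER_CTX *ctx = EVP_CIPHER_CTX_new();
        unsigned char *ek = malloc(EVP_PKEY_size(pubkey));  /* wrapped session key */
        unsigned char *ct = malloc(ptlen + EVP_MAX_BLOCK_LENGTH);
        int eklen = 0, len = 0, ctlen = 0;

        if (ctx == NULL || ek == NULL || ct == NULL)
            goto err;
        /* Generates a random session key, encrypts it to `pubkey`, emits the IV. */
        if (EVP_SealInit(ctx, EVP_aes_256_cbc(), &ek, &eklen, iv, &pubkey, 1) != 1)
            goto err;
        if (!EVP_SealUpdate(ctx, ct, &len, pt, ptlen))
            goto err;
        ctlen = len;
        if (!EVP_SealFinal(ctx, ct + ctlen, &len))
            goto err;
        ctlen += len;

        EVP_CIPHER_CTX_free(ctx);
        *ek_out = ek;
        *eklen_out = eklen;
        *ct_out = ct;
        return ctlen;
    err:
        EVP_CIPHER_CTX_free(ctx);
        free(ek);
        free(ct);
        return -1;
    }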
/third_party/openssl/providers/implementations/ciphers/
  cipher_aes_ocb_hw.c
     21  CRYPTO_ocb128_cleanup(&ctx->ocb); \
     22  fn_set_enc_key(key, keylen * 8, &ctx->ksenc.ks); \
     23  fn_set_dec_key(key, keylen * 8, &ctx->ksdec.ks); \
     24  if (!CRYPTO_ocb128_init(&ctx->ocb, &ctx->ksenc.ks, &ctx->ksdec.ks, \
     26  ctx->base.enc ? (ocb128_f)fn_stream_enc : \
     29  ctx->key_set = 1
     36  PROV_AES_OCB_CTX *ctx = (PROV_AES_OCB_CTX *)vctx;  in cipher_hw_aes_ocb_generic_initkey()
     68  PROV_AES_OCB_CTX *ctx ...  in cipher_hw_aes_ocb_aesni_initkey()
     91  PROV_AES_OCB_CTX *ctx = (PROV_AES_OCB_CTX *)vctx;  in cipher_hw_aes_ocb_t4_initkey()
/third_party/openssl/include/openssl/
  async.h
     57  void ASYNC_WAIT_CTX_free(ASYNC_WAIT_CTX *ctx);
     58  int ASYNC_WAIT_CTX_set_wait_fd(ASYNC_WAIT_CTX *ctx, const void *key,
     63  int ASYNC_WAIT_CTX_get_fd(ASYNC_WAIT_CTX *ctx, const void *key,
     65  int ASYNC_WAIT_CTX_get_all_fds(ASYNC_WAIT_CTX *ctx, OSSL_ASYNC_FD *fd,
     67  int ASYNC_WAIT_CTX_get_callback(ASYNC_WAIT_CTX *ctx,
     70  int ASYNC_WAIT_CTX_set_callback(ASYNC_WAIT_CTX *ctx,
     73  int ASYNC_WAIT_CTX_set_status(ASYNC_WAIT_CTX *ctx, int status);
     74  int ASYNC_WAIT_CTX_get_status(ASYNC_WAIT_CTX *ctx);
     75  int ASYNC_WAIT_CTX_get_changed_fds(ASYNC_WAIT_CTX *ctx, OSSL_ASYNC_FD *addfd,
     78  int ASYNC_WAIT_CTX_clear_fd(ASYNC_WAIT_CTX *ctx, cons ...
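The header only lists the ASYNC_WAIT_CTX accessors; in practice a wait context is handed to ASYNC_start_job(), and the job parks itself with ASYNC_pause_job() until the caller resumes it. A minimal sketch under that assumption follows; it simply busy-resumes the job instead of polling the wait fds that a real async engine would register on the context.

    #include <openssl/async.h>
    #include <stdio.h>

    /* Job body: runs on its own fibre.  ASYNC_pause_job() hands control back to
     * the caller until the job is resumed by a further ASYNC_start_job() call. */
    static int my_job(void *arg)
    {
        (void)arg;
        ASYNC_pause_job();      /* stand-in for "waiting on an async engine" */
        return 42;
    }

    int main(void)
    {
        ASYNC_JOB *job = NULL;
        ASYNC_WAIT_CTX *wctx = NULL;
        int ret = 0, rc;

        if (!ASYNC_is_capable() || !ASYNC_init_thread(1, 1))
            return 1;
        if ((wctx = ASYNC_WAIT_CTX_new()) == NULL)
            return 1;

        /* The first call starts the job; ASYNC_PAUSE means it is parked on the
         * wait context.  A real caller would poll the fds reported by
         * ASYNC_WAIT_CTX_get_all_fds() before resuming. */
        rc = ASYNC_start_job(&job, wctx, &ret, my_job, NULL, 0);
        while (rc == ASYNC_PAUSE)
            rc = ASYNC_start_job(&job, wctx, &ret, my_job, NULL, 0);

        if (rc == ASYNC_FINISH)
            printf("job returned %d\n", ret);

        ASYNC_WAIT_CTX_free(wctx);
        ASYNC_cleanup_thread();
        return rc == ASYNC_FINISH ? 0 : 1;
    }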
/third_party/skia/tests/
  GrSubmittedFlushTest.cpp
     23  static void testing_submitted_proc(void* ctx, bool success) {  in testing_submitted_proc()
     24  SubmittedInfo* info = (SubmittedInfo*)ctx;  in testing_submitted_proc()
     30  auto ctx = ctxInfo.directContext();  in DEF_GPUTEST_FOR_RENDERING_CONTEXTS()
     33  sk_sp<SkSurface> surface = SkSurface::MakeRenderTarget(ctx, SkBudgeted::kNo, info);  in DEF_GPUTEST_FOR_RENDERING_CONTEXTS()
     46  ctx->flush(flushInfo);  in DEF_GPUTEST_FOR_RENDERING_CONTEXTS()
     49  ctx->submit();  in DEF_GPUTEST_FOR_RENDERING_CONTEXTS()
     60  ctx->flush(flushInfo);  in DEF_GPUTEST_FOR_RENDERING_CONTEXTS()
     63  ctx->submit();  in DEF_GPUTEST_FOR_RENDERING_CONTEXTS()
     74  ctx->submit();  in DEF_GPUTEST_FOR_RENDERING_CONTEXTS()
     81  ctx ...  in DEF_GPUTEST_FOR_RENDERING_CONTEXTS()
/kernel/linux/linux-5.10/arch/sparc/crypto/
  camellia_glue.c
     40  struct camellia_sparc64_ctx *ctx = crypto_tfm_ctx(tfm);  in camellia_set_key()
     46  ctx->key_len = key_len;  in camellia_set_key()
     48  camellia_sparc64_key_expand(in_key, &ctx->encrypt_key[0],  in camellia_set_key()
     49  key_len, &ctx->decrypt_key[0]);  in camellia_set_key()
     64  struct camellia_sparc64_ctx *ctx = crypto_tfm_ctx(tfm);  in camellia_encrypt()
     66  camellia_sparc64_crypt(&ctx->encrypt_key[0],  in camellia_encrypt()
     68  (u32 *) dst, ctx->key_len);  in camellia_encrypt()
     73  struct camellia_sparc64_ctx *ctx = crypto_tfm_ctx(tfm);  in camellia_decrypt()
     75  camellia_sparc64_crypt(&ctx->decrypt_key[0],  in camellia_decrypt()
     77  (u32 *) dst, ctx ...  in camellia_decrypt()
     91  const struct camellia_sparc64_ctx *ctx = crypto_skcipher_ctx(tfm);  in __ecb_crypt()
    141  const struct camellia_sparc64_ctx *ctx = crypto_skcipher_ctx(tfm);  in cbc_encrypt()
    170  const struct camellia_sparc64_ctx *ctx = crypto_skcipher_ctx(tfm);  in cbc_decrypt()
/kernel/linux/linux-6.6/arch/sparc/crypto/
  camellia_glue.c
     40  struct camellia_sparc64_ctx *ctx = crypto_tfm_ctx(tfm);  in camellia_set_key()
     46  ctx->key_len = key_len;  in camellia_set_key()
     48  camellia_sparc64_key_expand(in_key, &ctx->encrypt_key[0],  in camellia_set_key()
     49  key_len, &ctx->decrypt_key[0]);  in camellia_set_key()
     64  struct camellia_sparc64_ctx *ctx = crypto_tfm_ctx(tfm);  in camellia_encrypt()
     66  camellia_sparc64_crypt(&ctx->encrypt_key[0],  in camellia_encrypt()
     68  (u32 *) dst, ctx->key_len);  in camellia_encrypt()
     73  struct camellia_sparc64_ctx *ctx = crypto_tfm_ctx(tfm);  in camellia_decrypt()
     75  camellia_sparc64_crypt(&ctx->decrypt_key[0],  in camellia_decrypt()
     77  (u32 *) dst, ctx ...  in camellia_decrypt()
     91  const struct camellia_sparc64_ctx *ctx = crypto_skcipher_ctx(tfm);  in __ecb_crypt()
    141  const struct camellia_sparc64_ctx *ctx = crypto_skcipher_ctx(tfm);  in cbc_encrypt()
    170  const struct camellia_sparc64_ctx *ctx = crypto_skcipher_ctx(tfm);  in cbc_decrypt()
/kernel/linux/linux-5.10/drivers/gpu/drm/
  drm_context.c
    334  struct drm_ctx ctx;  in drm_legacy_resctx()
    342  memset(&ctx, 0, sizeof(ctx));  in drm_legacy_resctx()
    344  ctx.handle = i;  in drm_legacy_resctx()
    345  if (copy_to_user(&res->contexts[i], &ctx, sizeof(ctx)))  in drm_legacy_resctx()
    369  struct drm_ctx *ctx = data;  in drm_legacy_addctx()
    388  ctx->handle = tmp_handle;  in drm_legacy_addctx()
    397  ctx_entry->handle = ctx->handle;  in drm_legacy_addctx()
    419  struct drm_ctx *ctx ...  in drm_legacy_getctx()
    445  struct drm_ctx *ctx = data;  in drm_legacy_switchctx()
    469  struct drm_ctx *ctx = data;  in drm_legacy_newctx()
    495  struct drm_ctx *ctx = data;  in drm_legacy_rmctx()
/kernel/linux/linux-5.10/fs/orangefs/
  dir.c
     43  * The position pointer (ctx->pos) encodes the part and offset on which
    190  struct dir_context *ctx)  in fill_from_part()
    197  i = ctx->pos & ~PART_MASK;  in fill_from_part()
    226  if (!dir_emit(ctx, s, *len,  in fill_from_part()
    233  ctx->pos = (ctx->pos & PART_MASK) | i;  in fill_from_part()
    243  struct dir_context *ctx)  in orangefs_dir_fill()
    248  count = ((ctx->pos & PART_MASK) >> PART_SHIFT) - 1;  in orangefs_dir_fill()
    263  r = fill_from_part(part, ctx);  in orangefs_dir_fill()
    274  ctx ...  in orangefs_dir_fill()
    189  fill_from_part(struct orangefs_dir_part *part, struct dir_context *ctx)
    241  orangefs_dir_fill(struct orangefs_inode_s *oi, struct orangefs_dir *od, struct dentry *dentry, struct dir_context *ctx)
    304  orangefs_dir_iterate(struct file *file, struct dir_context *ctx)
/kernel/linux/linux-6.6/fs/orangefs/
  dir.c
     43  * The position pointer (ctx->pos) encodes the part and offset on which
    190  struct dir_context *ctx)  in fill_from_part()
    197  i = ctx->pos & ~PART_MASK;  in fill_from_part()
    226  if (!dir_emit(ctx, s, *len,  in fill_from_part()
    233  ctx->pos = (ctx->pos & PART_MASK) | i;  in fill_from_part()
    243  struct dir_context *ctx)  in orangefs_dir_fill()
    248  count = ((ctx->pos & PART_MASK) >> PART_SHIFT) - 1;  in orangefs_dir_fill()
    263  r = fill_from_part(part, ctx);  in orangefs_dir_fill()
    274  ctx ...  in orangefs_dir_fill()
    189  fill_from_part(struct orangefs_dir_part *part, struct dir_context *ctx)
    241  orangefs_dir_fill(struct orangefs_inode_s *oi, struct orangefs_dir *od, struct dentry *dentry, struct dir_context *ctx)
    304  orangefs_dir_iterate(struct file *file, struct dir_context *ctx)
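Both kernel trees carry the same orangefs readdir scheme: ctx->pos packs a buffer-part index in the high bits (biased by one, as the "- 1" in orangefs_dir_fill() shows) together with a byte offset inside that part in the low bits. A small self-contained sketch of that encoding follows; the PART_SHIFT/PART_MASK values below are illustrative placeholders, not the ones orangefs uses.

    #include <stdint.h>
    #include <stdio.h>

    /* Hypothetical split of the readdir position: high bits select the buffer
     * "part", low bits are the offset inside it. */
    #define PART_SHIFT 24
    #define PART_MASK  (~((1ULL << PART_SHIFT) - 1))

    static uint64_t encode_pos(uint64_t part, uint64_t offset)
    {
        /* part is stored biased by one, matching the "- 1" in orangefs_dir_fill() */
        return ((part + 1) << PART_SHIFT) | (offset & ~PART_MASK);
    }

    static void decode_pos(uint64_t pos, uint64_t *part, uint64_t *offset)
    {
        *part   = ((pos & PART_MASK) >> PART_SHIFT) - 1;
        *offset = pos & ~PART_MASK;
    }

    int main(void)
    {
        uint64_t part, offset;
        uint64_t pos = encode_pos(2, 0x123);

        decode_pos(pos, &part, &offset);
        printf("pos=%#llx -> part=%llu offset=%#llx\n",
               (unsigned long long)pos, (unsigned long long)part,
               (unsigned long long)offset);
        return 0;
    }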
/kernel/linux/linux-6.6/drivers/gpu/drm/
  drm_context.c
    329  struct drm_ctx ctx;  in drm_legacy_resctx()
    336  memset(&ctx, 0, sizeof(ctx));  in drm_legacy_resctx()
    338  ctx.handle = i;  in drm_legacy_resctx()
    339  if (copy_to_user(&res->contexts[i], &ctx, sizeof(ctx)))  in drm_legacy_resctx()
    363  struct drm_ctx *ctx = data;  in drm_legacy_addctx()
    381  ctx->handle = tmp_handle;  in drm_legacy_addctx()
    390  ctx_entry->handle = ctx->handle;  in drm_legacy_addctx()
    412  struct drm_ctx *ctx ...  in drm_legacy_getctx()
    437  struct drm_ctx *ctx = data;  in drm_legacy_switchctx()
    460  struct drm_ctx *ctx = data;  in drm_legacy_newctx()
    485  struct drm_ctx *ctx = data;  in drm_legacy_rmctx()
/kernel/linux/linux-6.6/tools/testing/selftests/bpf/progs/
  map_kptr_fail.c
     25  int size_not_bpf_dw(struct __sk_buff *ctx)
     40  int non_const_var_off(struct __sk_buff *ctx)
     49  id = ctx->protocol;  in non_const_var_off()
     59  int non_const_var_off_kptr_xchg(struct __sk_buff *ctx)
     68  id = ctx->protocol;  in non_const_var_off_kptr_xchg()
     78  int misaligned_access_write(struct __sk_buff *ctx)
     94  int misaligned_access_read(struct __sk_buff *ctx)
    108  int reject_var_off_store(struct __sk_buff *ctx)
    121  id = ctx->protocol;  in reject_var_off_store()
    132  int reject_bad_type_match(struct __sk_buff *ctx)
    153  marked_as_untrusted_or_null(struct __sk_buff *ctx)
    168  correct_btf_id_check_size(struct __sk_buff *ctx)
    186  inherit_untrusted_on_walk(struct __sk_buff *ctx)
    206  reject_kptr_xchg_on_unref(struct __sk_buff *ctx)
    221  mark_ref_as_untrusted_or_null(struct __sk_buff *ctx)
    236  reject_untrusted_store_to_ref(struct __sk_buff *ctx)
    256  reject_untrusted_xchg(struct __sk_buff *ctx)
    276  reject_bad_type_xchg(struct __sk_buff *ctx)
    295  reject_member_of_ref_xchg(struct __sk_buff *ctx)
    314  reject_indirect_helper_access(struct __sk_buff *ctx)
    335  reject_indirect_global_func_access(struct __sk_buff *ctx)
    349  kptr_xchg_ref_state(struct __sk_buff *ctx)
    368  kptr_xchg_possibly_null(struct __sk_buff *ctx)
/third_party/mesa3d/src/gallium/drivers/virgl/
  virgl_texture.c
    115  static void *texture_transfer_map_resolve(struct pipe_context *ctx,  in texture_transfer_map_resolve()
    122  struct virgl_context *vctx = virgl_context(ctx);  in texture_transfer_map_resolve()
    133  if (!virgl_has_readback_format(ctx->screen, pipe_to_virgl_format(fmt), true)) {  in texture_transfer_map_resolve()
    142  assert(virgl_has_readback_format(ctx->screen, pipe_to_virgl_format(fmt), true));  in texture_transfer_map_resolve()
    157  resolve_tmp = ctx->screen->resource_create(ctx->screen, &templ);  in texture_transfer_map_resolve()
    162  virgl_copy_region_with_blit(ctx, resolve_tmp, 0, &dst_box, resource,  in texture_transfer_map_resolve()
    164  ctx->flush(ctx, NULL, 0);  in texture_transfer_map_resolve()
    167  void *ptr = virgl_resource_transfer_map(ctx, resolve_tm ...  in texture_transfer_map_resolve()
    233  virgl_texture_transfer_map(struct pipe_context *ctx, struct pipe_resource *resource, unsigned level, unsigned usage, const struct pipe_box *box, struct pipe_transfer **transfer)
    247  flush_data(struct pipe_context *ctx, struct virgl_transfer *trans, const struct pipe_box *box)
    257  virgl_texture_transfer_unmap(struct pipe_context *ctx, struct pipe_transfer *transfer)
/third_party/mesa3d/src/gallium/drivers/freedreno/a6xx/
  fd6_context.c
     76  struct fd_context *ctx = fd_context(pctx);  in fd6_vertex_state_create()
     82  fd_ringbuffer_new_object(ctx->pipe, 4 * (num_elements * 2 + 1));  in fd6_vertex_state_create()
    144  setup_state_map(struct fd_context *ctx)  in setup_state_map()
    148  fd_context_add_map(ctx, FD_DIRTY_VTXSTATE, BIT(FD6_GROUP_VTXSTATE));  in setup_state_map()
    149  fd_context_add_map(ctx, FD_DIRTY_VTXBUF, BIT(FD6_GROUP_VBO));  in setup_state_map()
    150  fd_context_add_map(ctx, FD_DIRTY_ZSA | FD_DIRTY_RASTERIZER,  in setup_state_map()
    152  fd_context_add_map(ctx, FD_DIRTY_ZSA | FD_DIRTY_BLEND | FD_DIRTY_PROG,  in setup_state_map()
    154  fd_context_add_map(ctx, FD_DIRTY_PROG | FD_DIRTY_RASTERIZER_CLIP_PLANE_ENABLE,  in setup_state_map()
    156  fd_context_add_map(ctx, FD_DIRTY_RASTERIZER, BIT(FD6_GROUP_RASTERIZER));  in setup_state_map()
    157  fd_context_add_map(ctx, ...  in setup_state_map()
/third_party/wpa_supplicant/wpa_supplicant-2.9_standard/src/eapol_supp/
  eapol_supp_sm.h
     86  * ctx - Pointer to arbitrary upper level context
     88  void *ctx;
    102  * @ctx: Pointer to context data (cb_ctx)
    110  void *ctx);
    136  * @ctx: Callback context (ctx)
    142  void (*eapol_done_cb)(void *ctx);
    146  * @ctx: Callback context (eapol_send_ctx)
    152  int (*eapol_send)(void *ctx, int type, const u8 *buf, size_t len);
    156  * @ctx ...
    371  eapol_sm_init(struct eapol_ctx *ctx)
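The hits sketch the upper-layer interface: the owner fills a struct eapol_ctx with an opaque ctx pointer and callbacks such as eapol_done_cb and eapol_send, then hands it to eapol_sm_init(). Below is a partial C sketch under that reading; the cb_ctx/eapol_send_ctx members are inferred from the doc comments above, the include paths are assumed, and the real structure has more mandatory callbacks than are filled in here.

    #include <stdio.h>
    #include <stdlib.h>

    #include "utils/common.h"              /* assumed: provides the u8 type  */
    #include "eapol_supp/eapol_supp_sm.h"

    /* Upper-layer state handed back to us through the ctx pointers. */
    struct my_supplicant { int authenticated; };

    static void my_eapol_done(void *ctx)
    {
        struct my_supplicant *supp = ctx;
        supp->authenticated = 1;
        printf("EAPOL authentication completed\n");
    }

    static int my_eapol_send(void *ctx, int type, const u8 *buf, size_t len)
    {
        (void)ctx;
        (void)buf;
        printf("would transmit EAPOL frame: type=%d len=%zu\n", type, len);
        return 0;
    }

    static struct eapol_sm *start_eapol(struct my_supplicant *supp)
    {
        /* In wpa_supplicant this context is typically heap-allocated and owned
         * by the state machine after eapol_sm_init().  Only the fields visible
         * in the search hits are filled in; the real struct has more. */
        struct eapol_ctx *ctx = calloc(1, sizeof(*ctx));

        if (ctx == NULL)
            return NULL;
        ctx->ctx = supp;
        ctx->cb_ctx = supp;              /* inferred from "(cb_ctx)" above        */
        ctx->eapol_done_cb = my_eapol_done;
        ctx->eapol_send = my_eapol_send;
        ctx->eapol_send_ctx = supp;      /* inferred from "(eapol_send_ctx)" above */
        return eapol_sm_init(ctx);
    }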
/kernel/linux/linux-5.10/tools/testing/selftests/bpf/progs/
  sendmsg4_prog.c
     22  int sendmsg_v4_prog(struct bpf_sock_addr *ctx)
     24  if (ctx->type != SOCK_DGRAM)  in sendmsg_v4_prog()
     28  if (ctx->msg_src_ip4 == bpf_htonl(SRC1_IP4) ||  in sendmsg_v4_prog()
     29  ctx->msg_src_ip4 == bpf_htonl(SRC2_IP4)) {  in sendmsg_v4_prog()
     30  ctx->msg_src_ip4 = bpf_htonl(SRC_REWRITE_IP4);  in sendmsg_v4_prog()
     37  if ((ctx->user_ip4 >> 24) == (bpf_htonl(DST_IP4) >> 24) &&  in sendmsg_v4_prog()
     38  ctx->user_port == bpf_htons(DST_PORT)) {  in sendmsg_v4_prog()
     39  ctx->user_ip4 = bpf_htonl(DST_REWRITE_IP4);  in sendmsg_v4_prog()
     40  ctx->user_port = bpf_htons(DST_REWRITE_PORT4);  in sendmsg_v4_prog()
/kernel/linux/linux-5.10/tools/perf/util/
  expr.h
     45  void expr__ctx_init(struct expr_parse_ctx *ctx);
     46  void expr__ctx_clear(struct expr_parse_ctx *ctx);
     47  void expr__del_id(struct expr_parse_ctx *ctx, const char *id);
     48  int expr__add_id(struct expr_parse_ctx *ctx, const char *id);
     49  int expr__add_id_val(struct expr_parse_ctx *ctx, const char *id, double val);
     50  int expr__add_ref(struct expr_parse_ctx *ctx, struct metric_ref *ref);
     51  int expr__get_id(struct expr_parse_ctx *ctx, const char *id,
     53  int expr__resolve_id(struct expr_parse_ctx *ctx, const char *id,
     55  int expr__parse(double *final_val, struct expr_parse_ctx *ctx,