
Searched refs:nodes (Results 51 - 75 of 924) sorted by relevance


/kernel/linux/linux-5.10/Documentation/sphinx/
rstFlatTable.py
47 from docutils import nodes namespace
104 class rowSpan(nodes.General, nodes.Element): pass # pylint: disable=C0103,C0321
105 class colSpan(nodes.General, nodes.Element): pass # pylint: disable=C0103,C0321
127 nodes.literal_block(self.block_text, self.block_text),
132 node = nodes.Element() # anonymous container for parsing
166 table = nodes.table()
167 tgroup = nodes.tgroup(cols=len(colwidths))
172 colspec = nodes
[all...]
/kernel/linux/linux-6.6/Documentation/sphinx/
rstFlatTable.py
45 from docutils import nodes namespace
94 class rowSpan(nodes.General, nodes.Element): pass # pylint: disable=C0103,C0321
95 class colSpan(nodes.General, nodes.Element): pass # pylint: disable=C0103,C0321
117 nodes.literal_block(self.block_text, self.block_text),
122 node = nodes.Element() # anonymous container for parsing
156 table = nodes.table()
157 tgroup = nodes.tgroup(cols=len(colwidths))
162 colspec = nodes
[all...]
/third_party/ltp/testcases/kernel/lib/
numa_helper.c
78 * set of nodes anyway. in filter_nodemask_mem()
140 * get_allowed_nodes_arr - get number and array of available nodes
141 * @num_nodes: pointer where number of available nodes will be stored
142 * @nodes: array of available node ids, this is MPOL_F_MEMS_ALLOWED
152 int get_allowed_nodes_arr(int flag, int *num_nodes, int **nodes) in get_allowed_nodes_arr() argument
160 if (nodes) in get_allowed_nodes_arr()
161 *nodes = NULL; in get_allowed_nodes_arr()
173 if (nodes) in get_allowed_nodes_arr()
174 *nodes = malloc(sizeof(int) * max_node); in get_allowed_nodes_arr()
177 if (nodemask == NULL || (nodes in get_allowed_nodes_arr()
221 int *nodes = NULL; get_allowed_nodes() local
[all...]
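The LTP helper above enumerates the NUMA nodes the calling task is allowed to allocate from, backed by get_mempolicy(2) with MPOL_F_MEMS_ALLOWED as the snippet notes. A minimal standalone sketch of the same query outside the LTP harness (the 1024-bit mask size is an assumption for illustration; link with -lnuma):

/* Sketch: query the set of NUMA nodes this task may allocate memory from,
 * via get_mempolicy(MPOL_F_MEMS_ALLOWED), as referenced in numa_helper.c. */
#include <numaif.h>   /* get_mempolicy, MPOL_F_MEMS_ALLOWED */
#include <stdio.h>

#define MAX_NODES 1024                      /* assumed upper bound on node ids */
#define BITS_PER_LONG (8 * sizeof(unsigned long))

int main(void)
{
    unsigned long mask[MAX_NODES / BITS_PER_LONG] = { 0 };

    if (get_mempolicy(NULL, mask, MAX_NODES, NULL, MPOL_F_MEMS_ALLOWED)) {
        perror("get_mempolicy");
        return 1;
    }

    for (unsigned long node = 0; node < MAX_NODES; node++)
        if (mask[node / BITS_PER_LONG] & (1UL << (node % BITS_PER_LONG)))
            printf("node %lu allowed\n", node);
    return 0;
}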
/third_party/node/deps/npm/test/lib/utils/
sbom-cyclonedx.js
75 const res = cyclonedxOutput({ npm, nodes: [root], packageType: 'application' })
81 const res = cyclonedxOutput({ npm, nodes: [root], packageLockOnly: true })
88 const res = cyclonedxOutput({ npm, nodes: [node] })
96 const res = cyclonedxOutput({ npm, nodes: [node] })
104 const res = cyclonedxOutput({ npm, nodes: [node] })
112 const res = cyclonedxOutput({ npm, nodes: [node] })
119 const res = cyclonedxOutput({ npm, nodes: [node] })
126 const res = cyclonedxOutput({ npm, nodes: [node] })
133 const res = cyclonedxOutput({ npm, nodes: [node] })
141 const res = cyclonedxOutput({ npm, nodes
[all...]
/third_party/typescript/tests/baselines/reference/
expressionTypeNodeShouldError.js
17 const nodes = document.getElementsByTagName("li");
18 type ItemType = "".typeof(nodes.item(0));
32 type ItemType2 = 4..typeof(nodes.item(0));
46 type ItemType3 = true.typeof(nodes.item(0));
60 var nodes = document.getElementsByTagName("li"); variable
61 typeof (nodes.item(0));
73 typeof (nodes.item(0));
85 typeof (nodes.item(0));
/kernel/linux/linux-5.10/tools/perf/tests/
mem2node.c
49 struct memory_node nodes[3]; in test__mem2node() local
51 .memory_nodes = (struct memory_node *) &nodes[0], in test__mem2node()
52 .nr_memory_nodes = ARRAY_SIZE(nodes), in test__mem2node()
57 for (i = 0; i < ARRAY_SIZE(nodes); i++) { in test__mem2node()
58 nodes[i].node = test_nodes[i].node; in test__mem2node()
59 nodes[i].size = 10; in test__mem2node()
62 (nodes[i].set = get_bitmap(test_nodes[i].map, 10))); in test__mem2node()
74 for (i = 0; i < ARRAY_SIZE(nodes); i++) in test__mem2node()
75 zfree(&nodes[i].set); in test__mem2node()
/kernel/linux/linux-6.6/tools/perf/tests/
mem2node.c
50 struct memory_node nodes[3]; in test__mem2node() local
52 .memory_nodes = (struct memory_node *) &nodes[0], in test__mem2node()
53 .nr_memory_nodes = ARRAY_SIZE(nodes), in test__mem2node()
58 for (i = 0; i < ARRAY_SIZE(nodes); i++) { in test__mem2node()
59 nodes[i].node = test_nodes[i].node; in test__mem2node()
60 nodes[i].size = 10; in test__mem2node()
63 (nodes[i].set = get_bitmap(test_nodes[i].map, 10))); in test__mem2node()
75 for (i = 0; i < ARRAY_SIZE(nodes); i++) in test__mem2node()
76 zfree(&nodes[i].set); in test__mem2node()
/third_party/node/deps/brotli/c/enc/
backward_references_hq.c
246 /* REQUIRES: cost < kInfinity, nodes[start_pos].cost < kInfinity */
248 static BROTLI_INLINE void UpdateZopfliNode(ZopfliNode* nodes, size_t pos, in UpdateZopfliNode() argument
251 ZopfliNode* next = &nodes[pos + len]; in UpdateZopfliNode()
303 const ZopfliNode* nodes, in ComputeMinimumCopyLength()
311 while (pos + len <= num_bytes && nodes[pos + len].u.cost <= min_cost) { in ComputeMinimumCopyLength()
327 /* REQUIRES: nodes[pos].cost < kInfinity
328 REQUIRES: nodes[0..pos] satisfies that "ZopfliNode array invariant". */
333 const ZopfliNode* nodes) { in ComputeDistanceShortcut()
334 const size_t clen = ZopfliNodeCopyLength(&nodes[pos]); in ComputeDistanceShortcut()
335 const size_t ilen = nodes[po in ComputeDistanceShortcut()
302 ComputeMinimumCopyLength(const float start_cost, const ZopfliNode* nodes, const size_t num_bytes, const size_t pos) ComputeMinimumCopyLength() argument
329 ComputeDistanceShortcut(const size_t block_start, const size_t pos, const size_t max_backward_limit, const size_t gap, const ZopfliNode* nodes) ComputeDistanceShortcut() argument
360 ComputeDistanceCache(const size_t pos, const int* starting_dist_cache, const ZopfliNode* nodes, int* dist_cache) ComputeDistanceCache() argument
381 EvaluateNode( const size_t block_start, const size_t pos, const size_t max_backward_limit, const size_t gap, const int* starting_dist_cache, const ZopfliCostModel* model, StartPosQueue* queue, ZopfliNode* nodes) EvaluateNode() argument
402 UpdateNodes( const size_t num_bytes, const size_t block_start, const size_t pos, const uint8_t* ringbuffer, const size_t ringbuffer_mask, const BrotliEncoderParams* params, const size_t max_backward_limit, const int* starting_dist_cache, const size_t num_matches, const BackwardMatch* matches, const ZopfliCostModel* model, StartPosQueue* queue, ZopfliNode* nodes) UpdateNodes() argument
554 ComputeShortestPathFromNodes(size_t num_bytes, ZopfliNode* nodes) ComputeShortestPathFromNodes() argument
571 BrotliZopfliCreateCommands(const size_t num_bytes, const size_t block_start, const ZopfliNode* nodes, int* dist_cache, size_t* last_insert_len, const BrotliEncoderParams* params, Command* commands, size_t* num_literals) BrotliZopfliCreateCommands() argument
616 ZopfliIterate(size_t num_bytes, size_t position, const uint8_t* ringbuffer, size_t ringbuffer_mask, const BrotliEncoderParams* params, const size_t gap, const int* dist_cache, const ZopfliCostModel* model, const uint32_t* num_matches, const BackwardMatch* matches, ZopfliNode* nodes) ZopfliIterate() argument
657 BrotliZopfliComputeShortestPath(MemoryManager* m, size_t num_bytes, size_t position, const uint8_t* ringbuffer, size_t ringbuffer_mask, ContextLut literal_context_lut, const BrotliEncoderParams* params, const int* dist_cache, Hasher* hasher, ZopfliNode* nodes) BrotliZopfliComputeShortestPath() argument
727 ZopfliNode* nodes = BROTLI_ALLOC(m, ZopfliNode, num_bytes + 1); BrotliCreateZopfliBackwardReferences() local
757 ZopfliNode* nodes; BrotliCreateHqZopfliBackwardReferences() local
[all...]
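The ZopfliNode machinery above is brotli's internal high-quality backward-reference search (a shortest-path pass over match candidates). Callers never touch these nodes directly; the path is exercised through the public encoder when quality is set to 10 or 11. A hedged sketch of that public entry point, with an illustrative buffer size:

/* Sketch: compress a small buffer at maximum quality, which routes through
 * the Zopfli-style backward-reference search implemented in this file.
 * Public API only; error handling kept minimal. */
#include <brotli/encode.h>
#include <stdint.h>
#include <stdio.h>
#include <string.h>

int main(void)
{
    const char *input = "example payload, repeated example payload";
    uint8_t encoded[256];                 /* assumed large enough here */
    size_t encoded_size = sizeof(encoded);

    if (!BrotliEncoderCompress(BROTLI_MAX_QUALITY,   /* quality 11: HQ path */
                               BROTLI_DEFAULT_WINDOW,
                               BROTLI_MODE_GENERIC,
                               strlen(input), (const uint8_t *)input,
                               &encoded_size, encoded)) {
        fprintf(stderr, "brotli compression failed\n");
        return 1;
    }
    printf("compressed %zu -> %zu bytes\n", strlen(input), encoded_size);
    return 0;
}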
/third_party/skia/third_party/externals/brotli/c/enc/
backward_references_hq.c
246 /* REQUIRES: cost < kInfinity, nodes[start_pos].cost < kInfinity */
248 static BROTLI_INLINE void UpdateZopfliNode(ZopfliNode* nodes, size_t pos, in UpdateZopfliNode() argument
251 ZopfliNode* next = &nodes[pos + len]; in UpdateZopfliNode()
303 const ZopfliNode* nodes, in ComputeMinimumCopyLength()
311 while (pos + len <= num_bytes && nodes[pos + len].u.cost <= min_cost) { in ComputeMinimumCopyLength()
327 /* REQUIRES: nodes[pos].cost < kInfinity
328 REQUIRES: nodes[0..pos] satisfies that "ZopfliNode array invariant". */
333 const ZopfliNode* nodes) { in ComputeDistanceShortcut()
334 const size_t clen = ZopfliNodeCopyLength(&nodes[pos]); in ComputeDistanceShortcut()
335 const size_t ilen = nodes[po in ComputeDistanceShortcut()
302 ComputeMinimumCopyLength(const float start_cost, const ZopfliNode* nodes, const size_t num_bytes, const size_t pos) ComputeMinimumCopyLength() argument
329 ComputeDistanceShortcut(const size_t block_start, const size_t pos, const size_t max_backward_limit, const size_t gap, const ZopfliNode* nodes) ComputeDistanceShortcut() argument
360 ComputeDistanceCache(const size_t pos, const int* starting_dist_cache, const ZopfliNode* nodes, int* dist_cache) ComputeDistanceCache() argument
381 EvaluateNode( const size_t block_start, const size_t pos, const size_t max_backward_limit, const size_t gap, const int* starting_dist_cache, const ZopfliCostModel* model, StartPosQueue* queue, ZopfliNode* nodes) EvaluateNode() argument
402 UpdateNodes( const size_t num_bytes, const size_t block_start, const size_t pos, const uint8_t* ringbuffer, const size_t ringbuffer_mask, const BrotliEncoderParams* params, const size_t max_backward_limit, const int* starting_dist_cache, const size_t num_matches, const BackwardMatch* matches, const ZopfliCostModel* model, StartPosQueue* queue, ZopfliNode* nodes) UpdateNodes() argument
554 ComputeShortestPathFromNodes(size_t num_bytes, ZopfliNode* nodes) ComputeShortestPathFromNodes() argument
571 BrotliZopfliCreateCommands(const size_t num_bytes, const size_t block_start, const ZopfliNode* nodes, int* dist_cache, size_t* last_insert_len, const BrotliEncoderParams* params, Command* commands, size_t* num_literals) BrotliZopfliCreateCommands() argument
616 ZopfliIterate(size_t num_bytes, size_t position, const uint8_t* ringbuffer, size_t ringbuffer_mask, const BrotliEncoderParams* params, const size_t gap, const int* dist_cache, const ZopfliCostModel* model, const uint32_t* num_matches, const BackwardMatch* matches, ZopfliNode* nodes) ZopfliIterate() argument
657 BrotliZopfliComputeShortestPath(MemoryManager* m, size_t num_bytes, size_t position, const uint8_t* ringbuffer, size_t ringbuffer_mask, ContextLut literal_context_lut, const BrotliEncoderParams* params, const int* dist_cache, Hasher* hasher, ZopfliNode* nodes) BrotliZopfliComputeShortestPath() argument
727 ZopfliNode* nodes = BROTLI_ALLOC(m, ZopfliNode, num_bytes + 1); BrotliCreateZopfliBackwardReferences() local
757 ZopfliNode* nodes; BrotliCreateHqZopfliBackwardReferences() local
[all...]
/third_party/nghttp2/tests/
nghttp2_pq_test.c
146 node nodes[10]; in test_nghttp2_pq_update() local
153 for (i = 0; i < (int)(sizeof(nodes) / sizeof(nodes[0])); ++i) { in test_nghttp2_pq_update()
154 nodes[i].key = i; in test_nghttp2_pq_update()
155 nodes[i].val = i; in test_nghttp2_pq_update()
156 nghttp2_pq_push(&pq, &nodes[i].ent); in test_nghttp2_pq_update()
161 for (i = 0; i < (int)(sizeof(nodes) / sizeof(nodes[0])); ++i) { in test_nghttp2_pq_update()
191 node nodes[10]; in test_nghttp2_pq_remove() local
201 push_nodes(&pq, nodes, in test_nghttp2_pq_remove()
[all...]
/third_party/node/deps/npm/node_modules/archy/
index.js
17 var nodes = obj.nodes || []; variable
19 var splitter = '\n' + prefix + (nodes.length ? chr('│') : ' ') + ' ';
23 + nodes.map(function (node, ix) {
24 var last = ix === nodes.length - 1;
25 var more = node.nodes && node.nodes.length;
/kernel/linux/linux-5.10/arch/arm/mach-sunxi/
mc_smp.c
690 * This holds any device nodes that we requested resources for,
703 int (*get_smp_nodes)(struct sunxi_mc_smp_nodes *nodes);
707 static void __init sunxi_mc_smp_put_nodes(struct sunxi_mc_smp_nodes *nodes) in sunxi_mc_smp_put_nodes() argument
709 of_node_put(nodes->prcm_node); in sunxi_mc_smp_put_nodes()
710 of_node_put(nodes->cpucfg_node); in sunxi_mc_smp_put_nodes()
711 of_node_put(nodes->sram_node); in sunxi_mc_smp_put_nodes()
712 of_node_put(nodes->r_cpucfg_node); in sunxi_mc_smp_put_nodes()
713 memset(nodes, 0, sizeof(*nodes)); in sunxi_mc_smp_put_nodes()
716 static int __init sun9i_a80_get_smp_nodes(struct sunxi_mc_smp_nodes *nodes) in sun9i_a80_get_smp_nodes() argument
742 sun8i_a83t_get_smp_nodes(struct sunxi_mc_smp_nodes *nodes) sun8i_a83t_get_smp_nodes() argument
782 struct sunxi_mc_smp_nodes nodes = { 0 }; sunxi_mc_smp_init() local
[all...]
/kernel/linux/linux-6.6/arch/arm/mach-sunxi/
mc_smp.c
689 * This holds any device nodes that we requested resources for,
702 int (*get_smp_nodes)(struct sunxi_mc_smp_nodes *nodes);
706 static void __init sunxi_mc_smp_put_nodes(struct sunxi_mc_smp_nodes *nodes) in sunxi_mc_smp_put_nodes() argument
708 of_node_put(nodes->prcm_node); in sunxi_mc_smp_put_nodes()
709 of_node_put(nodes->cpucfg_node); in sunxi_mc_smp_put_nodes()
710 of_node_put(nodes->sram_node); in sunxi_mc_smp_put_nodes()
711 of_node_put(nodes->r_cpucfg_node); in sunxi_mc_smp_put_nodes()
712 memset(nodes, 0, sizeof(*nodes)); in sunxi_mc_smp_put_nodes()
715 static int __init sun9i_a80_get_smp_nodes(struct sunxi_mc_smp_nodes *nodes) in sun9i_a80_get_smp_nodes() argument
741 sun8i_a83t_get_smp_nodes(struct sunxi_mc_smp_nodes *nodes) sun8i_a83t_get_smp_nodes() argument
781 struct sunxi_mc_smp_nodes nodes = { 0 }; sunxi_mc_smp_init() local
[all...]
/third_party/ffmpeg/libavcodec/
g722enc.c
174 struct TrellisNode **nodes[2]; in g722_encode_trellis() local
180 nodes[i] = c->nodep_buf[i]; in g722_encode_trellis()
183 nodes[i][0] = c->node_buf[i] + frontier; in g722_encode_trellis()
184 nodes[i][0]->ssd = 0; in g722_encode_trellis()
185 nodes[i][0]->path = 0; in g722_encode_trellis()
186 nodes[i][0]->state = c->band[i]; in g722_encode_trellis()
201 for (j = 0; j < frontier && nodes[0][j]; j++) { in g722_encode_trellis()
208 struct TrellisNode *cur_node = nodes[0][j]; in g722_encode_trellis()
237 /* Try to replace one of the leaf nodes with the new \ in g722_encode_trellis()
264 for (j = 0; j < frontier && nodes[ in g722_encode_trellis()
[all...]
/kernel/linux/linux-5.10/security/selinux/ss/
conditional.c
34 struct cond_expr_node *node = &expr->nodes[i]; in cond_evaluate_expr()
105 avnode = node->true_list.nodes[i]; in evaluate_cond_node()
113 avnode = node->false_list.nodes[i]; in evaluate_cond_node()
142 kfree(node->expr.nodes); in cond_node_destroy()
143 /* the avtab_ptr_t nodes are destroyed by the avtab */ in cond_node_destroy()
144 kfree(node->true_list.nodes); in cond_node_destroy()
145 kfree(node->false_list.nodes); in cond_node_destroy()
295 if (other->nodes[i] == node_ptr) { in cond_insertf()
340 list->nodes = kcalloc(len, sizeof(*list->nodes), GFP_KERNE in cond_read_av_list()
[all...]
/third_party/node/deps/v8/third_party/jinja2/
ext.py
9 from . import nodes namespace
28 from .nodes import ContextReference
108 list of multiple nodes.
120 return nodes.ExtensionAttribute(self.identifier, name, lineno=lineno)
126 :meth:`attr` + :class:`jinja2.nodes.Call`.
132 return nodes.Call(
265 variables[name.value] = var = nodes.Name(name.value, "load")
268 if isinstance(var, nodes.Call):
269 plural_expr = nodes.Name("_trans", "load")
271 plural_expr_assignment = nodes
[all...]
/third_party/skia/third_party/externals/jinja2/
ext.py
9 from . import nodes namespace
28 from .nodes import ContextReference
108 list of multiple nodes.
120 return nodes.ExtensionAttribute(self.identifier, name, lineno=lineno)
126 :meth:`attr` + :class:`jinja2.nodes.Call`.
132 return nodes.Call(
265 variables[name.value] = var = nodes.Name(name.value, "load")
268 if isinstance(var, nodes.Call):
269 plural_expr = nodes.Name("_trans", "load")
271 plural_expr_assignment = nodes
[all...]
/kernel/linux/linux-5.10/lib/
interval_tree_test.c
14 __param(int, nnodes, 100, "Number of nodes in the interval tree");
19 __param(bool, search_all, false, "Searches will iterate all nodes in the tree");
24 static struct interval_tree_node *nodes = NULL; variable
49 nodes[i].start = a; in init()
50 nodes[i].last = b; in init()
68 nodes = kmalloc_array(nnodes, sizeof(struct interval_tree_node), in interval_tree_test_init()
70 if (!nodes) in interval_tree_test_init()
75 kfree(nodes); in interval_tree_test_init()
88 interval_tree_insert(nodes + j, &root); in interval_tree_test_init()
90 interval_tree_remove(nodes in interval_tree_test_init()
[all...]
/kernel/linux/linux-6.6/lib/
interval_tree_test.c
14 __param(int, nnodes, 100, "Number of nodes in the interval tree");
19 __param(bool, search_all, false, "Searches will iterate all nodes in the tree");
24 static struct interval_tree_node *nodes = NULL; variable
49 nodes[i].start = a; in init()
50 nodes[i].last = b; in init()
68 nodes = kmalloc_array(nnodes, sizeof(struct interval_tree_node), in interval_tree_test_init()
70 if (!nodes) in interval_tree_test_init()
75 kfree(nodes); in interval_tree_test_init()
88 interval_tree_insert(nodes + j, &root); in interval_tree_test_init()
90 interval_tree_remove(nodes in interval_tree_test_init()
[all...]
/third_party/node/deps/openssl/openssl/crypto/x509/
pcy_node.c
28 X509_POLICY_NODE *ossl_policy_tree_find_sk(STACK_OF(X509_POLICY_NODE) *nodes, in STACK_OF()
38 idx = sk_X509_POLICY_NODE_find(nodes, &l); in STACK_OF()
39 return sk_X509_POLICY_NODE_value(nodes, idx); in STACK_OF()
49 for (i = 0; i < sk_X509_POLICY_NODE_num(level->nodes); i++) { in ossl_policy_level_find_node()
50 node = sk_X509_POLICY_NODE_value(level->nodes, i); in ossl_policy_level_find_node()
85 if (level->nodes == NULL) in ossl_policy_level_add_node()
86 level->nodes = ossl_policy_node_cmp_new(); in ossl_policy_level_add_node()
87 if (level->nodes == NULL) { in ossl_policy_level_add_node()
91 if (!sk_X509_POLICY_NODE_push(level->nodes, node)) { in ossl_policy_level_add_node()
122 (void) sk_X509_POLICY_NODE_pop(level->nodes); in ossl_policy_level_add_node()
[all...]
/third_party/openssl/crypto/x509/
pcy_node.c
28 X509_POLICY_NODE *ossl_policy_tree_find_sk(STACK_OF(X509_POLICY_NODE) *nodes, in STACK_OF()
38 idx = sk_X509_POLICY_NODE_find(nodes, &l); in STACK_OF()
39 return sk_X509_POLICY_NODE_value(nodes, idx); in STACK_OF()
49 for (i = 0; i < sk_X509_POLICY_NODE_num(level->nodes); i++) { in ossl_policy_level_find_node()
50 node = sk_X509_POLICY_NODE_value(level->nodes, i); in ossl_policy_level_find_node()
85 if (level->nodes == NULL) in ossl_policy_level_add_node()
86 level->nodes = ossl_policy_node_cmp_new(); in ossl_policy_level_add_node()
87 if (level->nodes == NULL) { in ossl_policy_level_add_node()
91 if (!sk_X509_POLICY_NODE_push(level->nodes, node)) { in ossl_policy_level_add_node()
122 (void) sk_X509_POLICY_NODE_pop(level->nodes); in ossl_policy_level_add_node()
[all...]
/kernel/linux/linux-6.6/security/selinux/ss/
conditional.c
34 struct cond_expr_node *node = &expr->nodes[i]; in cond_evaluate_expr()
105 avnode = node->true_list.nodes[i]; in evaluate_cond_node()
113 avnode = node->false_list.nodes[i]; in evaluate_cond_node()
142 kfree(node->expr.nodes); in cond_node_destroy()
143 /* the avtab_ptr_t nodes are destroyed by the avtab */ in cond_node_destroy()
144 kfree(node->true_list.nodes); in cond_node_destroy()
145 kfree(node->false_list.nodes); in cond_node_destroy()
296 if (other->nodes[i] == node_ptr) { in cond_insertf()
341 list->nodes = kcalloc(len, sizeof(*list->nodes), GFP_KERNE in cond_read_av_list()
[all...]
/kernel/linux/linux-6.6/mm/
mempolicy.c
15 * interleave Allocate memory interleaved over a set of nodes,
22 * bind Only allocate memory on a specific set of nodes,
26 * the allocation to memory nodes instead
34 * preferred many Try a set of nodes first before normal fallback. This is
185 int (*create)(struct mempolicy *pol, const nodemask_t *nodes);
186 void (*rebind)(struct mempolicy *pol, const nodemask_t *nodes);
202 static int mpol_new_nodemask(struct mempolicy *pol, const nodemask_t *nodes) in mpol_new_nodemask() argument
204 if (nodes_empty(*nodes)) in mpol_new_nodemask()
206 pol->nodes = *nodes; in mpol_new_nodemask()
210 mpol_new_preferred(struct mempolicy *pol, const nodemask_t *nodes) mpol_new_preferred() argument
228 mpol_set_nodemask(struct mempolicy *pol, const nodemask_t *nodes, struct nodemask_scratch *nsc) mpol_set_nodemask() argument
265 mpol_new(unsigned short mode, unsigned short flags, nodemask_t *nodes) mpol_new() argument
319 mpol_rebind_default(struct mempolicy *pol, const nodemask_t *nodes) mpol_rebind_default() argument
323 mpol_rebind_nodemask(struct mempolicy *pol, const nodemask_t *nodes) mpol_rebind_nodemask() argument
343 mpol_rebind_preferred(struct mempolicy *pol, const nodemask_t *nodes) mpol_rebind_preferred() argument
747 queue_pages_range(struct mm_struct *mm, unsigned long start, unsigned long end, nodemask_t *nodes, unsigned long flags, struct list_head *pagelist, bool lock_vma) queue_pages_range() argument
862 do_set_mempolicy(unsigned short mode, unsigned short flags, nodemask_t *nodes) do_set_mempolicy() argument
903 get_policy_nodemask(struct mempolicy *p, nodemask_t *nodes) get_policy_nodemask() argument
1407 get_nodes(nodemask_t *nodes, const unsigned long __user *nmask, unsigned long maxnode) get_nodes() argument
1443 copy_nodes_to_user(unsigned long __user *mask, unsigned long maxnode, nodemask_t *nodes) copy_nodes_to_user() argument
1492 nodemask_t nodes; kernel_mbind() local
1587 nodemask_t nodes; kernel_set_mempolicy() local
1713 nodemask_t nodes; kernel_get_mempolicy() local
2326 int nodes; alloc_pages_bulk_array_interleave() local
3010 nodemask_t nodes; mpol_parse_str() local
3143 nodemask_t nodes = NODE_MASK_NONE; mpol_to_str() local
[all...]
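The header comment quoted at the top of this hit group describes the NUMA policies (interleave, bind, preferred, preferred many) that mempolicy.c implements behind set_mempolicy(2) and mbind(2). A small userspace sketch of applying the interleave policy from that description (the node ids 0 and 1 are assumptions and the call will fail with EINVAL on a single-node machine; link with -lnuma):

/* Sketch: apply MPOL_INTERLEAVE over nodes 0 and 1 for this task, so that
 * subsequent page allocations are spread across the two nodes, as described
 * in the mempolicy.c header comment above. */
#include <numaif.h>   /* set_mempolicy, MPOL_INTERLEAVE */
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

int main(void)
{
    unsigned long nodemask = (1UL << 0) | (1UL << 1);   /* nodes 0 and 1 */

    if (set_mempolicy(MPOL_INTERLEAVE, &nodemask, 8 * sizeof(nodemask))) {
        perror("set_mempolicy");
        return 1;
    }

    /* New anonymous memory touched by this task is now interleaved. */
    char *buf = malloc(4 << 20);
    if (buf)
        memset(buf, 0, 4 << 20);
    printf("allocations now interleaved across nodes 0 and 1\n");
    free(buf);
    return 0;
}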
/third_party/node/tools/inspector_protocol/jinja2/
ext.py
15 from jinja2 import nodes namespace
105 list of multiple nodes.
117 return nodes.ExtensionAttribute(self.identifier, name, lineno=lineno)
122 :meth:`attr` + :class:`jinja2.nodes.Call`.
128 return nodes.Call(self.attr(name, lineno=lineno), args, kwargs,
251 variables[name.value] = var = nodes.Name(name.value, 'load')
254 if isinstance(var, nodes.Call):
255 plural_expr = nodes.Name('_trans', 'load')
257 plural_expr_assignment = nodes.Assign(
258 nodes
[all...]
/kernel/linux/linux-5.10/drivers/interconnect/qcom/
sc7180.c
161 .nodes = { &ebi },
168 .nodes = { &ebi },
175 .nodes = { &qns_llcc },
182 .nodes = { &qns_mem_noc_hf },
189 .nodes = { &qxm_crypto },
196 .nodes = { &qnm_snoc,
251 .nodes = { &qxm_camnoc_hf0_uncomp,
266 .nodes = { &acm_sys_tcu },
273 .nodes = { &qns_mem_noc_sf },
280 .nodes
[all...]
