| /kernel/linux/linux-5.10/mm/ |
| mmzone.c |
      46  static inline int zref_in_nodemask(struct zoneref *zref, nodemask_t *nodes)   [zref_in_nodemask(), argument]
      49  return node_isset(zonelist_node_idx(zref), *nodes);   [zref_in_nodemask()]
      58  nodemask_t *nodes)   [__next_zones_zonelist()]
      64  if (unlikely(nodes == NULL))   [__next_zones_zonelist()]
      69  (z->zone && !zref_in_nodemask(z, nodes)))   [__next_zones_zonelist()]
      56  __next_zones_zonelist(struct zoneref *z, enum zone_type highest_zoneidx, nodemask_t *nodes)   [definition, argument]
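
The __next_zones_zonelist() hits above show the pattern both kernel trees use here: walk a zonelist and skip entries whose node is not set in an optional node mask. Below is a minimal userspace sketch of that filtering idea; the types zone_ref and node_mask are illustrative stand-ins, not the kernel's struct zoneref or nodemask_t.

```c
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

/* Hypothetical stand-ins for struct zoneref / nodemask_t. */
struct zone_ref { int node_id; const char *name; };
typedef uint64_t node_mask;                     /* one bit per node */

static bool node_isset_in(node_mask m, int node) { return (m >> node) & 1u; }

/* Return the next entry whose node is allowed; a NULL mask means "any node". */
static const struct zone_ref *next_allowed(const struct zone_ref *z,
                                           const struct zone_ref *end,
                                           const node_mask *allowed)
{
    while (z < end && allowed && !node_isset_in(*allowed, z->node_id))
        z++;                                    /* skip zones on disallowed nodes */
    return z < end ? z : NULL;
}

int main(void)
{
    const struct zone_ref zl[] = { {0, "Normal-0"}, {1, "Normal-1"}, {0, "DMA32-0"} };
    node_mask only_node0 = 1u << 0;

    for (const struct zone_ref *z = zl; (z = next_allowed(z, zl + 3, &only_node0)); z++)
        printf("%s\n", z->name);                /* prints only the node-0 zones */
    return 0;
}
```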
|
| /kernel/linux/linux-6.6/drivers/clk/zynqmp/ |
| clk-zynqmp.h |
      73  const struct clock_topology *nodes);
      78  const struct clock_topology *nodes);
      84  const struct clock_topology *nodes);
      89  const struct clock_topology *nodes);
      95  const struct clock_topology *nodes);
|
| divider.c |
     268  * @nodes: Clock topology node
     276  const struct clock_topology *nodes)   [zynqmp_clk_register_divider()]
     289  if (nodes->type_flag & CLK_DIVIDER_READ_ONLY)   [zynqmp_clk_register_divider()]
     294  init.flags = zynqmp_clk_map_common_ccf_flags(nodes->flag);   [zynqmp_clk_register_divider()]
     300  div->is_frac = !!((nodes->flag & CLK_FRAC) |   [zynqmp_clk_register_divider()]
     301  (nodes->custom_type_flag & CUSTOM_FLAG_CLK_FRAC));   [zynqmp_clk_register_divider()]
     302  div->flags = zynqmp_clk_map_divider_ccf_flags(nodes->type_flag);   [zynqmp_clk_register_divider()]
     305  div->div_type = nodes->type;   [zynqmp_clk_register_divider()]
     311  div->max_div = zynqmp_clk_get_max_divisor(clk_id, nodes->type);   [zynqmp_clk_register_divider()]
     272  zynqmp_clk_register_divider(const char *name, u32 clk_id, const char * const *parents, u8 num_parents, const struct clock_topology *nodes)   [definition, argument]
|
| /kernel/linux/linux-6.6/mm/ |
| mmzone.c |
      46  static inline int zref_in_nodemask(struct zoneref *zref, nodemask_t *nodes)   [zref_in_nodemask(), argument]
      49  return node_isset(zonelist_node_idx(zref), *nodes);   [zref_in_nodemask()]
      58  nodemask_t *nodes)   [__next_zones_zonelist()]
      64  if (unlikely(nodes == NULL))   [__next_zones_zonelist()]
      69  (z->zone && !zref_in_nodemask(z, nodes)))   [__next_zones_zonelist()]
      56  __next_zones_zonelist(struct zoneref *z, enum zone_type highest_zoneidx, nodemask_t *nodes)   [definition, argument]
|
| /kernel/linux/linux-5.10/fs/btrfs/ |
| ulist.c |
      25  * for (all child nodes n in elem)
      31  * This assumes the graph nodes are addressable by u64. This stems from the
      49  INIT_LIST_HEAD(&ulist->nodes);   [ulist_init()]
      66  list_for_each_entry_safe(node, next, &ulist->nodes, list) {   [ulist_release()]
      70  INIT_LIST_HEAD(&ulist->nodes);   [ulist_release()]
     212  list_add_tail(&node->list, &ulist->nodes);   [ulist_add_merge()]
     265  if (list_empty(&ulist->nodes))   [ulist_next()]
     267  if (uiter->cur_list && uiter->cur_list->next == &ulist->nodes)   [ulist_next()]
     272  uiter->cur_list = ulist->nodes.next;   [ulist_next()]
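
The btrfs ulist matched above keeps each u64 value at most once while still letting callers iterate as entries are appended (the "graph nodes are addressable by u64" comment). A much-simplified sketch of that append-if-absent contract, using a linear scan instead of btrfs' auxiliary lookup structure and hypothetical names:

```c
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>

struct unode { uint64_t val; struct unode *next; };
struct ulist_sketch { struct unode *head, *tail; };

/* Append val unless it is already present; returns 1 if added, 0 if known, -1 on OOM. */
static int ulist_sketch_add(struct ulist_sketch *ul, uint64_t val)
{
    for (struct unode *n = ul->head; n; n = n->next)
        if (n->val == val)
            return 0;                           /* already in the set */

    struct unode *n = calloc(1, sizeof(*n));
    if (!n)
        return -1;
    n->val = val;
    if (ul->tail) ul->tail->next = n; else ul->head = n;
    ul->tail = n;
    return 1;
}

int main(void)
{
    struct ulist_sketch ul = {0};
    uint64_t in[] = { 7, 3, 7, 9 };

    for (int i = 0; i < 4; i++)
        ulist_sketch_add(&ul, in[i]);

    /* Iteration sees insertion order with duplicates dropped: 7 3 9 */
    for (struct unode *n = ul.head; n; n = n->next)
        printf("%llu ", (unsigned long long)n->val);
    putchar('\n');
    return 0;
}
```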
|
| ctree.c |
     102  if (!p->nodes[i])   [btrfs_release_path()]
     105  btrfs_tree_unlock_rw(p->nodes[i], p->locks[i]);   [btrfs_release_path()]
     108  free_extent_buffer(p->nodes[i]);   [btrfs_release_path()]
     109  p->nodes[i] = NULL;   [btrfs_release_path()]
    1777  * leaves vs nodes
    1840  * node level balancing, used to make sure nodes are in proper order for
    1861  mid = path->nodes[level];   [balance_level()]
    1870  parent = path->nodes[level + 1];   [balance_level()]
    1910  path->nodes[level] = NULL;   [balance_level()]
    2042  path->nodes[leve…   [balance_level()]
      … (more matches not shown)
| tree-log.c |
     373  u32 dst_size = btrfs_item_size_nr(path->nodes[0],   [overwrite_item()]
     393  dst_ptr = btrfs_item_ptr_offset(path->nodes[0], path->slots[0]);   [overwrite_item()]
     394  read_extent_buffer(path->nodes[0], dst_copy, dst_ptr,   [overwrite_item()]
     420  item = btrfs_item_ptr(path->nodes[0], path->slots[0],   [overwrite_item()]
     422  nbytes = btrfs_inode_nbytes(path->nodes[0], item);   [overwrite_item()]
     467  found_size = btrfs_item_size_nr(path->nodes[0],   [overwrite_item()]
     476  dst_ptr = btrfs_item_ptr_offset(path->nodes[0],   [overwrite_item()]
     496  struct extent_buffer *dst_eb = path->nodes[0];   [overwrite_item()]
     515  S_ISDIR(btrfs_inode_mode(path->nodes[0], dst_item))) {   [overwrite_item()]
     517  saved_i_size = btrfs_inode_size(path->nodes[…   [overwrite_item()]
      … (more matches not shown)
| /kernel/linux/linux-6.6/fs/btrfs/ |
| ulist.c |
      26  * for (all child nodes n in elem)
      32  * This assumes the graph nodes are addressable by u64. This stems from the
      51  INIT_LIST_HEAD(&ulist->nodes);   [ulist_init()]
      69  list_for_each_entry_safe(node, next, &ulist->nodes, list) {   [ulist_release()]
      73  INIT_LIST_HEAD(&ulist->nodes);   [ulist_release()]
     219  list_add_tail(&node->list, &ulist->nodes);   [ulist_add_merge()]
     273  if (list_empty(&ulist->nodes))   [ulist_next()]
     275  if (uiter->cur_list && uiter->cur_list->next == &ulist->nodes)   [ulist_next()]
     280  uiter->cur_list = ulist->nodes.next;   [ulist_next()]
|
| file-item.c |
     183  leaf = path->nodes[0];   [btrfs_insert_hole_extent()]
     225  leaf = path->nodes[0];   [btrfs_lookup_csum()]
     300  if (path->nodes[0]) {   [search_csum_tree()]
     301  item = btrfs_item_ptr(path->nodes[0], path->slots[0],   [search_csum_tree()]
     303  btrfs_item_key_to_cpu(path->nodes[0], &key, path->slots[0]);   [search_csum_tree()]
     304  itemsize = btrfs_item_size(path->nodes[0], path->slots[0]);   [search_csum_tree()]
     321  btrfs_item_key_to_cpu(path->nodes[0], &key, path->slots[0]);   [search_csum_tree()]
     322  itemsize = btrfs_item_size(path->nodes[0], path->slots[0]);   [search_csum_tree()]
     331  read_extent_buffer(path->nodes[0], dst, (unsigned long)item,   [search_csum_tree()]
     492  leaf = path->nodes[…   [btrfs_lookup_csums_list()]
      … (more matches not shown)
| tree-log.c |
     202  * nodes from multiple log transactions to do sequential   [start_log_trans()]
     410  u32 dst_size = btrfs_item_size(path->nodes[0],   [overwrite_item()]
     430  dst_ptr = btrfs_item_ptr_offset(path->nodes[0], path->slots[0]);   [overwrite_item()]
     431  read_extent_buffer(path->nodes[0], dst_copy, dst_ptr,   [overwrite_item()]
     457  item = btrfs_item_ptr(path->nodes[0], path->slots[0],   [overwrite_item()]
     459  nbytes = btrfs_inode_nbytes(path->nodes[0], item);   [overwrite_item()]
     504  found_size = btrfs_item_size(path->nodes[0],   [overwrite_item()]
     513  dst_ptr = btrfs_item_ptr_offset(path->nodes[0],   [overwrite_item()]
     533  struct extent_buffer *dst_eb = path->nodes[0];   [overwrite_item()]
     551  S_ISDIR(btrfs_inode_mode(path->nodes[…   [overwrite_item()]
      … (more matches not shown)
| /kernel/linux/linux-6.6/drivers/interconnect/qcom/ |
| sc8180x.c |
    1350  .nodes = { &slv_ebi }
    1357  .nodes = { &slv_ebi }
    1364  .nodes = { &slv_qns_llcc }
    1370  .nodes = { &slv_qns_mem_noc_hf }
    1377  .nodes = { &slv_qns_cdsp_mem_noc }
    1383  .nodes = { &mas_qxm_crypto }
    1390  .nodes = { &mas_qnm_snoc,
    1452  .nodes = { &mas_qxm_camnoc_hf0_uncomp,
    1464  .nodes = { &mas_qup_core_0,
    1472  .nodes…
      … (more matches not shown)
| /base/security/device_security_level/baselib/utils/src/ |
| utils_state_machine.c |
      40  static const StateNode *GetScheduleStateNode(const StateNode *nodes, uint32_t nodeCnt, uint32_t state, uint32_t event)   [GetScheduleStateNode(), argument]
      43  const StateNode *node = nodes + i;   [GetScheduleStateNode()]
      76  void ScheduleMachine(const StateNode *nodes, uint32_t nodeCnt, StateMachine *machine, uint32_t event, const void *para)   [ScheduleMachine(), argument]
      79  if ((nodes == NULL) || (nodeCnt == 0) || (machine == NULL)) {   [ScheduleMachine()]
      80  SECURITY_LOG_ERROR("invlid params, nodes or context is null");   [ScheduleMachine()]
      92  const StateNode *node = GetScheduleStateNode(nodes, nodeCnt, state, event);   [ScheduleMachine()]
     109  return ScheduleMachine(nodes, nodeCnt, machine, nextEvent, nextPara);   [ScheduleMachine()]
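
ScheduleMachine() above is a table-driven state machine: GetScheduleStateNode() scans an array of StateNode rows for the (state, event) pair and runs the matching handler. A self-contained sketch of the same pattern; the enums, table, and schedule() helper are illustrative, not the OpenHarmony API:

```c
#include <stdint.h>
#include <stdio.h>

enum { ST_IDLE, ST_RUNNING };
enum { EV_START, EV_STOP };

/* One row of the transition table: (state, event) -> action + next state. */
struct state_node {
    uint32_t state, event;
    void (*action)(void);
    uint32_t next_state;
};

static void on_start(void) { puts("starting"); }
static void on_stop(void)  { puts("stopping"); }

static const struct state_node table[] = {
    { ST_IDLE,    EV_START, on_start, ST_RUNNING },
    { ST_RUNNING, EV_STOP,  on_stop,  ST_IDLE    },
};

/* Linear scan for the matching row, as GetScheduleStateNode() does. */
static void schedule(uint32_t *state, uint32_t event)
{
    for (size_t i = 0; i < sizeof(table) / sizeof(table[0]); i++) {
        if (table[i].state == *state && table[i].event == event) {
            table[i].action();
            *state = table[i].next_state;
            return;
        }
    }
    printf("no transition for state %u / event %u\n", *state, event);
}

int main(void)
{
    uint32_t state = ST_IDLE;
    schedule(&state, EV_START);   /* idle -> running */
    schedule(&state, EV_START);   /* no matching row  */
    schedule(&state, EV_STOP);    /* running -> idle  */
    return 0;
}
```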
|
| /third_party/jinja2/ |
| nativetypes.py |
       8  from . import nodes   [namespace]
      17  """Return a native Python type from the list of compiled nodes. If
      19  nodes are concatenated as strings. If the result can be parsed with
      52  ``str()`` around output nodes.
      63  self, node: nodes.Expr, frame: Frame, finalize: CodeGenerator._FinalizeInfo
      68  raise nodes.Impossible()
      70  if isinstance(node, nodes.TemplateData):
      76  self, node: nodes.Expr, frame: Frame, finalize: CodeGenerator._FinalizeInfo
      82  self, node: nodes.Expr, frame: Frame, finalize: CodeGenerator._FinalizeInfo
     101  nodes ar…
      … (more matches not shown)
| /third_party/ltp/testcases/kernel/mem/cpuset/ |
| cpuset01.c |
      39  static int *nodes;   [variable]
      62  if (nodes[i] >= MAXNODES)   [test_cpuset()]
      64  set_node(nmask, nodes[i]);   [test_cpuset()]
      71  SAFE_CG_PRINTF(tst_cg, "cpuset.mems", "%d", nodes[0]);   [test_cpuset()]
      72  SAFE_CG_PRINTF(tst_cg, "cpuset.mems", "%d", nodes[1]);   [test_cpuset()]
      82  if (get_allowed_nodes_arr(NH_MEMS | NH_CPUS, &nnodes, &nodes) < 0)   [setup()]
|
| /kernel/linux/linux-5.10/Documentation/sphinx/ |
| kfigure.py |
      54  from docutils import nodes   [namespace]
      60  from sphinx.util.nodes import clean_astext
     104  node = nodes.literal_block(data, data)
     346  class kernel_image(nodes.image):
     365  if len(result) == 2 or isinstance(result[0], nodes.system_message):
     383  class kernel_figure(nodes.figure):
     402  if len(result) == 2 or isinstance(result[0], nodes.system_message):
     449  img_node = nodes.image(node.rawsource, **node.attributes)
     458  class kernel_render(nodes.General, nodes…
      … (more matches not shown)
| /kernel/linux/linux-6.6/Documentation/sphinx/ |
| kfigure.py |
      56  from docutils import nodes   [namespace]
      61  from sphinx.util.nodes import clean_astext
      96  node = nodes.literal_block(data, data)
     444  class kernel_image(nodes.image):
     463  if len(result) == 2 or isinstance(result[0], nodes.system_message):
     481  class kernel_figure(nodes.figure):
     500  if len(result) == 2 or isinstance(result[0], nodes.system_message):
     547  img_node = nodes.image(node.rawsource, **node.attributes)
     556  class kernel_render(nodes.General, nodes…
      … (more matches not shown)
| /third_party/mesa3d/src/nouveau/codegen/ |
| nv50_ir_graph.cpp |
     203  nodes = new Graph::Node * [graph->getSize() + 1];   [DFSIterator()]
     206  nodes[graph->getSize()] = 0;   [DFSIterator()]
     216  if (nodes)   [~DFSIterator()]
     217  delete[] nodes;   [~DFSIterator()]
     223  nodes[count++] = node;   [search()]
     230  nodes[count++] = node;   [search()]
     235  virtual void *get() const { return nodes[pos]; }   [get()]
     239  Graph::Node **nodes;   [member in nv50_ir::DFSIterator]
     259  nodes = new Graph::Node * [graph->getSize() + 1];   [CFGIterator()]
     262  nodes[grap…   [CFGIterator()]
     323  Graph::Node **nodes;   [member in nv50_ir::CFGIterator]
      … (more matches not shown)
| /third_party/python/Lib/test/ |
| test_finalization.py |
     360  nodes = [cls() for cls in classes]
     361  for i in range(len(nodes)):
     362  nodes[i].chain(nodes[i-1])
     363  return nodes
     368  nodes = self.build_chain(classes)
     369  ids = [id(s) for s in nodes]
     370  wrs = [weakref.ref(s) for s in nodes]
     371  del nodes
     382  nodes…
      … (more matches not shown)
| /third_party/ffmpeg/libavcodec/ |
| ylc.c |
      64  Node *nodes, int node,   [get_tree_codes()]
      69  s = nodes[node].sym;   [get_tree_codes()]
      78  get_tree_codes(bits, lens, xlat, nodes, nodes[node].l, pfx, pl,   [get_tree_codes()]
      81  get_tree_codes(bits, lens, xlat, nodes, nodes[node].r, pfx, pl,   [get_tree_codes()]
      88  Node nodes[512];   [build_vlc(), local]
      97  nodes[i].count = table[i];   [build_vlc()]
      98  nodes[i].sym = i;   [build_vlc()]
      99  nodes[…   [build_vlc()]
      63  get_tree_codes(uint32_t *bits, int16_t *lens, uint8_t *xlat, Node *nodes, int node, uint32_t pfx, int pl, int *pos)   [definition, argument]
      … (more matches not shown)
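
get_tree_codes() in ylc.c recurses over a flat array of Node entries, extending the code prefix on each left/right step and recording a code when it reaches a leaf. A hedged sketch of that traversal with a simplified node layout (child indices and a sym field, not ffmpeg's exact structures); it assumes every non-leaf node has both children:

```c
#include <stdint.h>
#include <stdio.h>

/* -1 marks "no child"; leaves carry the symbol. */
struct tnode { int l, r; int sym; };

/* Depth-first walk: append 0 for a left step, 1 for a right step,
 * and record (code, length, symbol) when a leaf is reached. */
static void emit_codes(const struct tnode *nodes, int idx,
                       uint32_t prefix, int len,
                       uint32_t *bits, int *lens, int *syms, int *pos)
{
    if (nodes[idx].l < 0 && nodes[idx].r < 0) {
        bits[*pos] = prefix;
        lens[*pos] = len;
        syms[*pos] = nodes[idx].sym;
        (*pos)++;
        return;
    }
    emit_codes(nodes, nodes[idx].l, (prefix << 1) | 0, len + 1, bits, lens, syms, pos);
    emit_codes(nodes, nodes[idx].r, (prefix << 1) | 1, len + 1, bits, lens, syms, pos);
}

int main(void)
{
    /* Tiny tree: root (index 2) -> leaf 'a', inner (index 3) -> leaves 'b', 'c'. */
    const struct tnode nodes[] = {
        { -1, -1, 'a' }, { -1, -1, 'b' }, { 0, 3, 0 }, { 1, 4, 0 }, { -1, -1, 'c' },
    };
    uint32_t bits[3];
    int lens[3], syms[3], pos = 0;

    emit_codes(nodes, 2, 0, 0, bits, lens, syms, &pos);
    for (int i = 0; i < pos; i++)
        printf("%c -> code %u, %d bit(s)\n", syms[i], bits[i], lens[i]);
    return 0;
}
```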
| /kernel/linux/linux-5.10/drivers/dma/ |
| uniphier-xdmac.c |
      83  struct uniphier_xdmac_desc_node nodes[];   [member]
     140  src_addr = xd->nodes[xd->cur_node].src;   [uniphier_xdmac_chan_start()]
     141  dst_addr = xd->nodes[xd->cur_node].dst;   [uniphier_xdmac_chan_start()]
     142  its = xd->nodes[xd->cur_node].burst_size;   [uniphier_xdmac_chan_start()]
     143  tnum = xd->nodes[xd->cur_node].nr_burst;   [uniphier_xdmac_chan_start()]
     295  xd = kzalloc(struct_size(xd, nodes, nr), GFP_NOWAIT);   [uniphier_xdmac_prep_dma_memcpy()]
     301  xd->nodes[i].src = src;   [uniphier_xdmac_prep_dma_memcpy()]
     302  xd->nodes[i].dst = dst;   [uniphier_xdmac_prep_dma_memcpy()]
     303  xd->nodes[i].burst_size = burst_size;   [uniphier_xdmac_prep_dma_memcpy()]
     304  xd->nodes[…   [uniphier_xdmac_prep_dma_memcpy()]
      … (more matches not shown)
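
The kzalloc(struct_size(xd, nodes, nr), ...) hit is the kernel idiom for allocating a header plus a trailing C99 flexible array member in one block; struct_size() additionally guards the size computation against overflow. A hedged userspace analogue of the same layout, using plain calloc arithmetic and illustrative names:

```c
#include <stdio.h>
#include <stdlib.h>

struct desc_node { unsigned long src, dst; unsigned int burst_size, nr_burst; };

/* Header followed by a flexible array member, as in uniphier_xdmac_desc. */
struct desc {
    unsigned int nr_nodes;
    struct desc_node nodes[];
};

static struct desc *desc_alloc(unsigned int nr)
{
    /* Kernel code would use kzalloc(struct_size(d, nodes, nr), GFP_...);
     * the plain-C equivalent is sizeof(header) + nr * sizeof(element). */
    struct desc *d = calloc(1, sizeof(*d) + (size_t)nr * sizeof(d->nodes[0]));
    if (d)
        d->nr_nodes = nr;
    return d;
}

int main(void)
{
    struct desc *d = desc_alloc(4);
    if (!d)
        return 1;
    for (unsigned int i = 0; i < d->nr_nodes; i++)
        d->nodes[i].burst_size = 8;       /* elements live right after the header */
    printf("allocated %u trailing nodes\n", d->nr_nodes);
    free(d);
    return 0;
}
```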
| /kernel/linux/linux-6.6/drivers/dma/ |
| uniphier-xdmac.c |
      83  struct uniphier_xdmac_desc_node nodes[];   [member]
     140  src_addr = xd->nodes[xd->cur_node].src;   [uniphier_xdmac_chan_start()]
     141  dst_addr = xd->nodes[xd->cur_node].dst;   [uniphier_xdmac_chan_start()]
     142  its = xd->nodes[xd->cur_node].burst_size;   [uniphier_xdmac_chan_start()]
     143  tnum = xd->nodes[xd->cur_node].nr_burst;   [uniphier_xdmac_chan_start()]
     295  xd = kzalloc(struct_size(xd, nodes, nr), GFP_NOWAIT);   [uniphier_xdmac_prep_dma_memcpy()]
     301  xd->nodes[i].src = src;   [uniphier_xdmac_prep_dma_memcpy()]
     302  xd->nodes[i].dst = dst;   [uniphier_xdmac_prep_dma_memcpy()]
     303  xd->nodes[i].burst_size = burst_size;   [uniphier_xdmac_prep_dma_memcpy()]
     304  xd->nodes[…   [uniphier_xdmac_prep_dma_memcpy()]
      … (more matches not shown)
| /third_party/node/test/common/ |
| heap.js |
      21  const nodes =
      22  readHeapInfo(dump.nodes, meta.node_fields, meta.node_types, dump.strings);
      26  for (const node of nodes) {
      34  while (edgeIndex === nodes[fromNodeIndex].edge_count) {
      38  const toNode = nodes[to_node / meta.node_fields.length];
      39  const fromNode = nodes[fromNodeIndex];
      51  for (const node of nodes) {
      55  return nodes;
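
heap.js rebuilds graph edges from flattened node and edge arrays, advancing a shared edge index and moving on to the next source node once that node's edge_count entries have been consumed. A small C sketch of the same "flat arrays plus per-node edge counts" walk; the arrays here are made up and do not follow the V8 snapshot field layout:

```c
#include <stdio.h>

/* Flattened graph: per-node edge counts, plus one shared edge-target array. */
static const int edge_count[]  = { 2, 0, 1 };   /* node 0 owns 2 edges, node 2 owns 1 */
static const int edge_target[] = { 1, 2, 0 };   /* targets, listed node by node       */

int main(void)
{
    int edge_index = 0;

    for (int from = 0; from < 3; from++) {
        /* Consume exactly edge_count[from] entries for this source node;
         * heap.js instead advances fromNodeIndex whenever edgeIndex
         * reaches the current node's edge_count. */
        for (int k = 0; k < edge_count[from]; k++, edge_index++)
            printf("edge: node %d -> node %d\n", from, edge_target[edge_index]);
    }
    return 0;
}
```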
|
| /foundation/graphic/graphic_3d/lume/LumeRender/src/loader/ |
| render_node_graph_loader.cpp |
     182  if (const auto nodes = json.find("nodes"); nodes) {   [LoadString()]
     183  if (nodes->is_array()) {   [LoadString()]
     184  nodeDescriptors.reserve(nodes->array_.size());   [LoadString()]
     185  for (auto const& node : nodes->array_) {   [LoadString()]
     195  finalResult.error += "\"nodes\" must to be an array.";   [LoadString()]
     199  if (const auto nodes = json.find("renderNodeGraphOutputResources"); nodes) {   [LoadString()]
     200  outputResources.reserve(nodes…   [LoadString()]
      … (more matches not shown)
| /kernel/linux/linux-5.10/drivers/interconnect/qcom/ |
| osm-l3.c |
      52  * struct qcom_icc_node - Qualcomm specific interconnect nodes
      54  * @links: an array of nodes where we can go next while traversing
      68  const struct qcom_icc_node **nodes;   [member]
      93  .nodes = sdm845_osm_l3_nodes,
     109  .nodes = sc7180_osm_l3_nodes,
     125  .nodes = sm8150_osm_l3_nodes,
     141  .nodes = sm8250_epss_l3_nodes,
     163  list_for_each_entry(n, &provider->nodes, node_list)   [qcom_icc_set()]
     258  qnodes = desc->nodes;   [qcom_osm_l3_probe()]
     261  data = devm_kzalloc(&pdev->dev, struct_size(data, nodes, num_node…   [qcom_osm_l3_probe()]
      … (more matches not shown)
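
The qcom_icc_node comment matched above describes each interconnect node as carrying a links array of neighbours reachable while traversing the topology. A hedged sketch of that adjacency layout and a depth-first walk over it; the names, MAX_LINKS, and the assumption of an acyclic topology are all illustrative, not the driver's actual data:

```c
#include <stdio.h>

#define MAX_LINKS 4

/* Each node names its neighbours, mirroring the @links idea in osm-l3.c. */
struct icc_node_sketch {
    const char *name;
    const struct icc_node_sketch *links[MAX_LINKS];
    unsigned int num_links;
};

static void walk(const struct icc_node_sketch *n, int depth)
{
    printf("%*s%s\n", depth * 2, "", n->name);
    for (unsigned int i = 0; i < n->num_links; i++)
        walk(n->links[i], depth + 1);      /* assumes the topology has no cycles */
}

int main(void)
{
    static struct icc_node_sketch slv_mem = { .name = "slv_memory", .num_links = 0 };
    static struct icc_node_sketch mas_cpu = {
        .name = "mas_cpu_cluster", .links = { &slv_mem }, .num_links = 1,
    };

    walk(&mas_cpu, 0);                     /* prints the master, then its slave */
    return 0;
}
```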
| /kernel/linux/linux-5.10/drivers/gpu/drm/nouveau/nvkm/core/ |
| mm.c |
      26  #define node(root, dir) ((root)->nl_entry.dir == &mm->nodes) ? NULL : \
      36  list_for_each_entry(node, &mm->nodes, nl_entry) {   [nvkm_mm_dump()]
     246  prev = list_last_entry(&mm->nodes, typeof(*node), nl_entry);   [nvkm_mm_init()]
     255  list_add_tail(&node->nl_entry, &mm->nodes);   [nvkm_mm_init()]
     259  INIT_LIST_HEAD(&mm->nodes);   [nvkm_mm_init()]
     275  list_add_tail(&node->nl_entry, &mm->nodes);   [nvkm_mm_init()]
     286  int nodes = 0;   [nvkm_mm_fini(), local]
     291  list_for_each_entry(node, &mm->nodes, nl_entry) {   [nvkm_mm_fini()]
     293  if (++nodes > mm->heap_nodes) {   [nvkm_mm_fini()]
     300  list_for_each_entry_safe(node, temp, &mm->nodes, nl_entr…   [nvkm_mm_fini()]
      … (more matches not shown)