Lines matching defs:data (UniPhier outer cache controller driver, cache-uniphier.c)
61 * struct uniphier_cache_data - UniPhier outer cache specific data
95 * @data: cache controller specific data
97 static void __uniphier_cache_sync(struct uniphier_cache_data *data)
101 data->op_base + UNIPHIER_SSCOPE);
103 readl_relaxed(data->op_base + UNIPHIER_SSCOPE);
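The sync fragment above writes a sync command and then immediately reads the same register back; the read forces the posted MMIO write to reach the controller before the caller proceeds. A minimal user-space sketch of that write-then-read-back idiom, assuming C throughout, with the register offset, flag value, and accessor names invented here (the real ones live in the driver):

    #include <stdint.h>

    /* hypothetical offset and command bit; see the driver for real values */
    #define SSCOPE         0x244
    #define SSCOPE_CM_SYNC 0x00000008

    static inline void mmio_write32(volatile void *base, unsigned long off,
                                    uint32_t v)
    {
            *(volatile uint32_t *)((volatile char *)base + off) = v;
    }

    static inline uint32_t mmio_read32(volatile void *base, unsigned long off)
    {
            return *(volatile uint32_t *)((volatile char *)base + off);
    }

    /* issue a cache sync, then read back to flush the posted write */
    static void cache_sync(volatile void *op_base)
    {
            mmio_write32(op_base, SSCOPE, SSCOPE_CM_SYNC);
            (void)mmio_read32(op_base, SSCOPE);  /* read back to confirm */
    }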
109 * @data: cache controller specific data
111 * @size: data size of the range operation (ignored for the "all" operation)
114 static void __uniphier_cache_maint_common(struct uniphier_cache_data *data,
147 writel_relaxed(UNIPHIER_SSCOLPQS_EF, data->op_base + UNIPHIER_SSCOLPQS);
152 data->op_base + UNIPHIER_SSCOQM);
156 writel_relaxed(start, data->op_base + UNIPHIER_SSCOQAD);
157 writel_relaxed(size, data->op_base + UNIPHIER_SSCOQSZ);
159 } while (unlikely(readl_relaxed(data->op_base + UNIPHIER_SSCOPPQSEF) &
163 while (likely(readl_relaxed(data->op_base + UNIPHIER_SSCOLPQS) !=
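The fragment around the do/while shows the operation-queue protocol: clear the end flag, post the command (plus address and size for range operations), resubmit if the status register flags the submission as failed or overflowed, then spin until the end flag comes back. In the kernel this runs with local IRQs disabled; that bracketing is omitted in the sketch below. Register offsets and flag bits are stand-ins, and the mmio helpers are the ones sketched earlier:

    #include <stdint.h>

    /* hypothetical offsets and bits; the driver header has the real ones */
    #define SSCOQM      0x248   /* operation queue: command */
    #define SSCOQAD     0x24c   /* operation queue: start address */
    #define SSCOQSZ     0x250   /* operation queue: size */
    #define SSCOPPQSEF  0x25c   /* submission error flags */
    #define SSCOLPQS    0x260   /* completion status */
    #define SSCOPPQSEF_FE  0x2  /* submission failed */
    #define SSCOPPQSEF_OE  0x1  /* queue overflow */
    #define SSCOLPQS_EF    0x4  /* end flag: operation done */

    void mmio_write32(volatile void *base, unsigned long off, uint32_t v);
    uint32_t mmio_read32(volatile void *base, unsigned long off);

    static void cache_maint_common(volatile void *op_base, unsigned long start,
                                   unsigned long size, uint32_t op, int is_range)
    {
            /* writing the end flag back clears the completion status */
            mmio_write32(op_base, SSCOLPQS, SSCOLPQS_EF);

            do {
                    /* post the command; range ops also carry address + size */
                    mmio_write32(op_base, SSCOQM, op);
                    if (is_range) {
                            mmio_write32(op_base, SSCOQAD, start);
                            mmio_write32(op_base, SSCOQSZ, size);
                    }
                    /* resubmit if the queue rejected or dropped the command */
            } while (mmio_read32(op_base, SSCOPPQSEF) &
                     (SSCOPPQSEF_FE | SSCOPPQSEF_OE));

            /* busy-wait until the controller raises the end flag */
            while (mmio_read32(op_base, SSCOLPQS) != SSCOLPQS_EF)
                    ;
    }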
170 static void __uniphier_cache_maint_all(struct uniphier_cache_data *data,
173 __uniphier_cache_maint_common(data, 0, 0,
176 __uniphier_cache_sync(data);
179 static void __uniphier_cache_maint_range(struct uniphier_cache_data *data,
189 start = start & ~(data->line_size - 1);
193 if (unlikely(size >= (unsigned long)(-data->line_size))) {
195 __uniphier_cache_maint_all(data, operation);
203 size = ALIGN(size, data->line_size);
207 data->range_op_max_size);
209 __uniphier_cache_maint_common(data, start, chunk_size,
216 __uniphier_cache_sync(data);
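The range path aligns start down and size up to the line size, falls back to an "all" operation when the aligned size would wrap, and otherwise walks the range in chunks no larger than range_op_max_size, syncing once at the end. The arithmetic is worth seeing in isolation; a standalone, compilable sketch (names and the printf stand-ins are invented here):

    #include <stdio.h>

    /* align helpers matching the kernel's semantics for power-of-2 sizes */
    #define ALIGN_DOWN(x, a) ((x) & ~((a) - 1))
    #define ALIGN_UP(x, a)   (((x) + (a) - 1) & ~((a) - 1))

    static void maint_range(unsigned long start, unsigned long end,
                            unsigned long line_size, unsigned long max_chunk)
    {
            unsigned long size;

            start = ALIGN_DOWN(start, line_size); /* cover a partial first line */
            size = end - start;

            /* aligned size would wrap: treat it as a whole-cache operation */
            if (size >= (unsigned long)(-line_size)) {
                    printf("maint all\n");
                    return;
            }

            size = ALIGN_UP(size, line_size);     /* cover a partial last line */

            while (size) {
                    unsigned long chunk = size < max_chunk ? size : max_chunk;

                    printf("maint [%#lx, %#lx)\n", start, start + chunk);
                    start += chunk;
                    size -= chunk;
            }
    }

    int main(void)
    {
            /* e.g. a 128-byte line and the 4 MiB cap minus one line */
            maint_range(0x1010, 0x500000, 128, (1UL << 22) - 128);
            return 0;
    }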
219 static void __uniphier_cache_enable(struct uniphier_cache_data *data, bool on)
226 writel_relaxed(val, data->ctrl_base + UNIPHIER_SSCC);
230 struct uniphier_cache_data *data)
235 writel_relaxed(data->way_mask, data->way_ctrl_base + 4 * cpu);
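Enabling is a single control-word write to SSCC, and the way mask is then replicated into one 32-bit slot per possible CPU at way_ctrl_base + 4 * cpu. A sketch of the per-CPU way programming, assuming the stride-4 layout shown above and the mmio helper from the earlier sketches:

    #include <stdint.h>

    void mmio_write32(volatile void *base, unsigned long off, uint32_t v);

    /* each CPU owns one 32-bit way-enable register, packed at stride 4 */
    static void set_active_ways(volatile void *way_ctrl_base,
                                uint32_t way_mask, unsigned int nr_cpus)
    {
            unsigned int cpu;

            for (cpu = 0; cpu < nr_cpus; cpu++)
                    mmio_write32(way_ctrl_base, 4 * cpu, way_mask);
    }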
241 struct uniphier_cache_data *data;
243 list_for_each_entry(data, &uniphier_cache_list, list)
244 __uniphier_cache_maint_range(data, start, end, operation);
249 struct uniphier_cache_data *data;
251 list_for_each_entry(data, &uniphier_cache_list, list)
252 __uniphier_cache_maint_all(data, operation);
282 struct uniphier_cache_data *data;
284 list_for_each_entry_reverse(data, &uniphier_cache_list, list)
285 __uniphier_cache_enable(data, false);
292 struct uniphier_cache_data *data;
296 list_for_each_entry(data, &uniphier_cache_list, list) {
297 __uniphier_cache_enable(data, true);
298 __uniphier_cache_set_active_ways(data);
304 struct uniphier_cache_data *data;
306 list_for_each_entry(data, &uniphier_cache_list, list)
307 __uniphier_cache_sync(data);
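All of the entry points above walk uniphier_cache_list: levels are registered in probe order (inner to outer), maintenance, enable, and sync iterate forward, and disable iterates in reverse so the outermost level is torn down first. A kernel-style sketch of that registration/dispatch pattern, using the real <linux/list.h> API but with the surrounding structure names invented:

    #include <linux/list.h>

    struct cache_level {
            struct list_head list;
            /* ... per-level register bases and geometry ... */
    };

    static LIST_HEAD(cache_list);

    static void register_level(struct cache_level *lvl)
    {
            /* probed inner-first, so the list stays inner-to-outer */
            list_add_tail(&lvl->list, &cache_list);
    }

    static void for_all_levels(void (*op)(struct cache_level *))
    {
            struct cache_level *lvl;

            list_for_each_entry(lvl, &cache_list, list)
                    op(lvl);
    }

    static void disable_all(void (*disable)(struct cache_level *))
    {
            struct cache_level *lvl;

            /* tear down outermost-first, the mirror of enable order */
            list_for_each_entry_reverse(lvl, &cache_list, list)
                    disable(lvl);
    }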
318 struct uniphier_cache_data *data;
345 data = kzalloc(sizeof(*data), GFP_KERNEL);
346 if (!data)
349 if (of_property_read_u32(np, "cache-line-size", &data->line_size) ||
350 !is_power_of_2(data->line_size)) {
357 if (of_property_read_u32(np, "cache-sets", &data->nsets) ||
358 !is_power_of_2(data->nsets)) {
366 cache_size == 0 || cache_size % (data->nsets * data->line_size)) {
373 data->way_mask = GENMASK(cache_size / data->nsets / data->line_size - 1,
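way_mask is derived from the device-tree geometry: ways = cache-size / (cache-sets * cache-line-size), and GENMASK(ways - 1, 0) turns that count into a bitmap with one bit per way. A worked example with plausible numbers (not taken from any specific SoC), using a GENMASK expansion equivalent to the kernel's:

    #include <stdint.h>
    #include <stdio.h>

    /* same result as the kernel's GENMASK(h, l) for unsigned long */
    #define GENMASK(h, l) \
            (((~0UL) << (l)) & (~0UL >> (8 * sizeof(long) - 1 - (h))))

    int main(void)
    {
            /* hypothetical geometry: 512 KiB, 256 sets, 128-byte lines */
            uint32_t cache_size = 512 * 1024;
            uint32_t nsets = 256, line_size = 128;
            uint32_t ways = cache_size / nsets / line_size;  /* = 16 */

            printf("ways=%u mask=%#lx\n", ways, GENMASK(ways - 1, 0));
            /* prints: ways=16 mask=0xffff */
            return 0;
    }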
376 data->ctrl_base = of_iomap(np, 0);
377 if (!data->ctrl_base) {
383 data->rev_base = of_iomap(np, 1);
384 if (!data->rev_base) {
390 data->op_base = of_iomap(np, 2);
391 if (!data->op_base) {
397 data->way_ctrl_base = data->ctrl_base + 0xc00;
400 u32 revision = readl(data->rev_base + UNIPHIER_SSCID);
406 data->range_op_max_size = (u32)1 << 22;
414 data->way_ctrl_base = data->ctrl_base + 0x870;
418 data->way_ctrl_base = data->ctrl_base + 0x840;
425 data->range_op_max_size -= data->line_size;
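For the outer level-2 node the probe reads the IP revision from SSCID and applies per-SoC quirks: older revisions cap a single range operation at 1 << 22 bytes (4 MiB), and the offset of the way-control block moved between revisions (0x870 and 0x840 above, against the 0xc00 default). The unconditional subtraction at line 425 then shaves one line off the cap; plausibly this also turns the zero default left by kzalloc() into an effectively unlimited, still line-aligned limit. A sketch of that quirk selection, with the revision IDs and struct hypothetical:

    #include <stdint.h>

    /* hypothetical revision IDs; the driver's switch has the real values */
    enum { REV_A = 0x11, REV_B = 0x12, REV_C = 0x16 };

    struct level2_quirks {
            uint32_t range_op_max_size;  /* caller zero-inits: 0 = no cap yet */
            unsigned long way_ctrl_off;
    };

    static void apply_quirks(uint32_t revision, struct level2_quirks *q)
    {
            q->way_ctrl_off = 0xc00;             /* default for newer SoCs */

            /* older revisions cap one range operation at 4 MiB */
            if (revision <= REV_C)
                    q->range_op_max_size = (uint32_t)1 << 22;

            switch (revision) {
            case REV_A:
                    q->way_ctrl_off = 0x870;
                    break;
            case REV_B:
            case REV_C:
                    q->way_ctrl_off = 0x840;
                    break;
            }
    }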
427 INIT_LIST_HEAD(&data->list);
428 list_add_tail(&data->list, &uniphier_cache_list); /* no mutex */
445 iounmap(data->op_base);
446 iounmap(data->rev_base);
447 iounmap(data->ctrl_base);
448 kfree(data);
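The tail of the init function is the usual unwind: the three iounmap() calls mirror the three of_iomap() calls, and the single error label works at any stage because the driver relies on iounmap() and kfree() being no-ops for NULL. A condensed kernel-style sketch of that shape (error handling collapsed, names of the wrapping function and label invented; the real code checks and reports each mapping separately):

    /* kernel-style sketch; needs <linux/of_address.h>, <linux/slab.h> */
    static int __init cache_level_init(struct device_node *np)
    {
            struct uniphier_cache_data *data;
            int ret = 0;

            data = kzalloc(sizeof(*data), GFP_KERNEL);
            if (!data)
                    return -ENOMEM;

            data->ctrl_base = of_iomap(np, 0);   /* control registers */
            data->rev_base  = of_iomap(np, 1);   /* revision register */
            data->op_base   = of_iomap(np, 2);   /* operation queue */
            if (!data->ctrl_base || !data->rev_base || !data->op_base) {
                    ret = -ENOMEM;
                    goto err;
            }

            INIT_LIST_HEAD(&data->list);
            list_add_tail(&data->list, &uniphier_cache_list);
            return 0;
    err:
            /* any of these may be NULL; iounmap() and kfree() accept that */
            iounmap(data->op_base);
            iounmap(data->rev_base);
            iounmap(data->ctrl_base);
            kfree(data);
            return ret;
    }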