void *cache;
u32 cache_dirty;
- unsigned long *cache_present;
- unsigned int cache_present_nbits;
-
struct reg_default *patch;
int patch_regs;
int regcache_write(struct regmap *map,
unsigned int reg, unsigned int value);
int regcache_sync(struct regmap *map);
int regcache_sync_block(struct regmap *map, void *block,
+ unsigned long *cache_present,
unsigned int block_base, unsigned int start,
unsigned int end);
bool regcache_set_val(struct regmap *map, void *base, unsigned int idx,
unsigned int val);
int regcache_lookup_reg(struct regmap *map, unsigned int reg);
-int regcache_set_reg_present(struct regmap *map, unsigned int reg);
-
-static inline bool regcache_reg_present(struct regmap *map, unsigned int reg)
-{
- if (!map->cache_present)
- return false;
- if (reg > map->cache_present_nbits)
- return false;
- return map->cache_present[BIT_WORD(reg)] & BIT_MASK(reg);
-}
int _regmap_raw_write(struct regmap *map, unsigned int reg,
const void *val, size_t val_len, bool async);
/* block of adjacent registers */
void *block;
+ /* Which registers are present */
+ unsigned long *cache_present;
/* base register handled by this block */
unsigned int base_reg;
/* number of registers available in the block */
unsigned int blklen;
} __attribute__ ((packed));
static void regcache_rbtree_set_register(struct regmap *map,
struct regcache_rbtree_node *rbnode,
unsigned int idx, unsigned int val)
{
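+ /* mark the register as present before storing its value */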
+ set_bit(idx, rbnode->cache_present);
regcache_set_val(map, rbnode->block, idx, val);
}
map->lock(map->lock_arg);
mem_size = sizeof(*rbtree_ctx);
- mem_size += BITS_TO_LONGS(map->cache_present_nbits) * sizeof(long);
for (node = rb_first(&rbtree_ctx->root); node != NULL;
node = rb_next(node)) {
n = container_of(node, struct regcache_rbtree_node, node);
mem_size += sizeof(*n);
mem_size += (n->blklen * map->cache_word_size);
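+ /* plus each node's presence bitmap */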
+ mem_size += BITS_TO_LONGS(n->blklen) * sizeof(long);
regcache_rbtree_get_base_top_reg(map, n, &base, &top);
this_registers = ((top - base) / map->reg_stride) + 1;
rbtree_node = rb_entry(next, struct regcache_rbtree_node, node);
next = rb_next(&rbtree_node->node);
rb_erase(&rbtree_node->node, &rbtree_ctx->root);
+ kfree(rbtree_node->cache_present);
kfree(rbtree_node->block);
kfree(rbtree_node);
}
rbnode = regcache_rbtree_lookup(map, reg);
if (rbnode) {
reg_tmp = (reg - rbnode->base_reg) / map->reg_stride;
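+ /* presence is tracked per node, indexed by block-relative register number */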
- if (!regcache_reg_present(map, reg))
+ if (!test_bit(reg_tmp, rbnode->cache_present))
return -ENOENT;
*value = regcache_rbtree_get_register(map, rbnode, reg_tmp);
} else {
static int regcache_rbtree_insert_to_block(struct regmap *map,
struct regcache_rbtree_node *rbnode,
unsigned int base_reg,
unsigned int top_reg,
unsigned int reg,
unsigned int value)
{
unsigned int blklen;
unsigned int pos, offset;
+ unsigned long *present;
u8 *blk;
blklen = (top_reg - base_reg) / map->reg_stride + 1;
if (!blk)
return -ENOMEM;
+ present = krealloc(rbnode->cache_present,
+ BITS_TO_LONGS(blklen) * sizeof(*present), GFP_KERNEL);
+ if (!present) {
+ kfree(blk);
+ return -ENOMEM;
+ }
+
+ /* krealloc() does not zero the enlarged tail; clear the new bits */
+ if (blklen > rbnode->blklen)
+ bitmap_clear(present, rbnode->blklen, blklen - rbnode->blklen);
+
/* insert the register value in the correct place in the rbnode block */
- if (pos == 0)
+ if (pos == 0) {
memmove(blk + offset * map->cache_word_size,
blk, rbnode->blklen * map->cache_word_size);
+ /* entries moved up by 'offset'; shift their presence bits to match */
+ bitmap_shift_left(present, present, offset, blklen);
+ }
/* update the rbnode block, its size and the base register */
rbnode->block = blk;
rbnode->blklen = blklen;
rbnode->base_reg = base_reg;
+ rbnode->cache_present = present;
regcache_rbtree_set_register(map, rbnode, pos, value);
return 0;
rbnode->block = kmalloc(rbnode->blklen * map->cache_word_size,
GFP_KERNEL);
- if (!rbnode->block) {
- kfree(rbnode);
- return NULL;
- }
+ if (!rbnode->block)
+ goto err_free;
+
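+ /* one presence bit per register in the block, all clear until first write */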
+ rbnode->cache_present = kzalloc(BITS_TO_LONGS(rbnode->blklen) *
+ sizeof(*rbnode->cache_present), GFP_KERNEL);
+ if (!rbnode->cache_present)
+ goto err_free_block;
return rbnode;
+
+err_free_block:
+ kfree(rbnode->block);
+err_free:
+ kfree(rbnode);
+ return NULL;
}
static int regcache_rbtree_write(struct regmap *map, unsigned int reg,
unsigned int value)
{
int ret;
rbtree_ctx = map->cache;
- /* update the reg_present bitmap, make space if necessary */
- ret = regcache_set_reg_present(map, reg);
- if (ret < 0)
- return ret;
/* if we can't locate it in the cached rbnode we'll have
* to traverse the rbtree looking for it.
*/
else
end = rbnode->blklen;
- ret = regcache_sync_block(map, rbnode->block, rbnode->base_reg,
- start, end);
+ ret = regcache_sync_block(map, rbnode->block,
+ rbnode->cache_present,
+ rbnode->base_reg, start, end);
if (ret != 0)
return ret;
}
return regmap_async_complete(map);
}
+static int regcache_rbtree_drop(struct regmap *map, unsigned int min,
+ unsigned int max)
+{
+ struct regcache_rbtree_ctx *rbtree_ctx;
+ struct regcache_rbtree_node *rbnode;
+ struct rb_node *node;
+ unsigned int base_reg, top_reg;
+ unsigned int start, end;
+
+ rbtree_ctx = map->cache;
+ for (node = rb_first(&rbtree_ctx->root); node; node = rb_next(node)) {
+ rbnode = rb_entry(node, struct regcache_rbtree_node, node);
+
+ regcache_rbtree_get_base_top_reg(map, rbnode, &base_reg,
+ &top_reg);
+ if (base_reg > max)
+ break;
+ if (top_reg < min)
+ continue;
+
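+ /* clamp [min, max] to this node and convert to block-relative indices */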
+ if (min > base_reg)
+ start = (min - base_reg) / map->reg_stride;
+ else
+ start = 0;
+
+ if (max < top_reg)
+ end = (max - base_reg) / map->reg_stride + 1;
+ else
+ end = rbnode->blklen;
+
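+ /* only the presence bits are cleared; the block's storage stays allocated */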
+ bitmap_clear(rbnode->cache_present, start, end - start);
+ }
+
+ return 0;
+}
+
struct regcache_ops regcache_rbtree_ops = {
.type = REGCACHE_RBTREE,
.name = "rbtree",
.exit = regcache_rbtree_exit,
.read = regcache_rbtree_read,
.write = regcache_rbtree_write,
- .sync = regcache_rbtree_sync
+ .sync = regcache_rbtree_sync,
+ .drop = regcache_rbtree_drop,
};
map->reg_defaults_raw = config->reg_defaults_raw;
map->cache_word_size = DIV_ROUND_UP(config->val_bits, 8);
map->cache_size_raw = map->cache_word_size * config->num_reg_defaults_raw;
- map->cache_present = NULL;
- map->cache_present_nbits = 0;
map->cache = NULL;
map->cache_ops = cache_types[i];
BUG_ON(!map->cache_ops);
- kfree(map->cache_present);
kfree(map->reg_defaults);
if (map->cache_free)
kfree(map->reg_defaults_raw);
int regcache_drop_region(struct regmap *map, unsigned int min,
unsigned int max)
{
- unsigned int reg;
int ret = 0;
- if (!map->cache_present && !(map->cache_ops && map->cache_ops->drop))
+ if (!map->cache_ops || !map->cache_ops->drop)
return -EINVAL;
map->lock(map->lock_arg);
trace_regcache_drop_region(map->dev, min, max);
- if (map->cache_present)
- for (reg = min; reg < max + 1; reg++)
- clear_bit(reg, map->cache_present);
-
- if (map->cache_ops && map->cache_ops->drop)
- ret = map->cache_ops->drop(map, min, max);
+ ret = map->cache_ops->drop(map, min, max);
map->unlock(map->lock_arg);

return ret;
}
EXPORT_SYMBOL_GPL(regcache_cache_bypass);
-int regcache_set_reg_present(struct regmap *map, unsigned int reg)
-{
- unsigned long *cache_present;
- unsigned int cache_present_size;
- unsigned int nregs;
- int i;
-
- nregs = reg + 1;
- cache_present_size = BITS_TO_LONGS(nregs);
- cache_present_size *= sizeof(long);
-
- if (!map->cache_present) {
- cache_present = kmalloc(cache_present_size, GFP_KERNEL);
- if (!cache_present)
- return -ENOMEM;
- bitmap_zero(cache_present, nregs);
- map->cache_present = cache_present;
- map->cache_present_nbits = nregs;
- }
-
- if (nregs > map->cache_present_nbits) {
- cache_present = krealloc(map->cache_present,
- cache_present_size, GFP_KERNEL);
- if (!cache_present)
- return -ENOMEM;
- for (i = 0; i < nregs; i++)
- if (i >= map->cache_present_nbits)
- clear_bit(i, cache_present);
- map->cache_present = cache_present;
- map->cache_present_nbits = nregs;
- }
-
- set_bit(reg, map->cache_present);
- return 0;
-}
-
bool regcache_set_val(struct regmap *map, void *base, unsigned int idx,
unsigned int val)
{
return -ENOENT;
}
+static bool regcache_reg_present(unsigned long *cache_present, unsigned int idx)
+{
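+ /* a NULL bitmap means the cache type does not track presence; report every register as present */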
+ if (!cache_present)
+ return true;
+
+ return test_bit(idx, cache_present);
+}
+
static int regcache_sync_block_single(struct regmap *map, void *block,
+ unsigned long *cache_present,
unsigned int block_base,
unsigned int start, unsigned int end)
{
for (i = start; i < end; i++) {
regtmp = block_base + (i * map->reg_stride);
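+ /* skip registers that were never written into the cache */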
- if (!regcache_reg_present(map, regtmp))
+ if (!regcache_reg_present(cache_present, i))
continue;
val = regcache_get_val(map, block, i);
}
static int regcache_sync_block_raw(struct regmap *map, void *block,
+ unsigned long *cache_present,
unsigned int block_base, unsigned int start,
unsigned int end)
{
for (i = start; i < end; i++) {
regtmp = block_base + (i * map->reg_stride);
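+ /* a hole in the presence bitmap flushes the raw data accumulated so far */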
- if (!regcache_reg_present(map, regtmp)) {
+ if (!regcache_reg_present(cache_present, i)) {
ret = regcache_sync_block_raw_flush(map, &data,
base, regtmp);
if (ret != 0)
}
int regcache_sync_block(struct regmap *map, void *block,
+ unsigned long *cache_present,
unsigned int block_base, unsigned int start,
unsigned int end)
{
if (regmap_can_raw_write(map))
- return regcache_sync_block_raw(map, block, block_base,
- start, end);
+ return regcache_sync_block_raw(map, block, cache_present,
+ block_base, start, end);
else
- return regcache_sync_block_single(map, block, block_base,
- start, end);
+ return regcache_sync_block_single(map, block, cache_present,
+ block_base, start, end);
}