@@ -1661,6 +1661,7 @@ struct ext4_sb_info {
atomic_t s_mb_num_fragments;
u32 s_mb_frsp_mem_limit;
struct ext4_mb_frsp_lru s_mb_frsp_lru;
+ atomic_t s_mb_ac_id;
/* workqueue for reserved extent conversions (buffered io) */
struct workqueue_struct *rsv_conversion_wq;
@@ -945,6 +945,7 @@ void ext4_mb_frsp_free_tree(struct super_block *sb, struct ext4_frsp_tree *tree)
tree->frsp_len_root = RB_ROOT_CACHED;
out:
mutex_unlock(&tree->frsp_lock);
+ trace_ext4_mb_frsp_free_tree(sb, tree->frsp_index);
}
/*
@@ -1476,6 +1477,7 @@ int ext4_mb_frsp_optimize(struct ext4_allocation_context *ac, int *tree_idx)
mb_debug(ac->ac_sb,
"Optimizer suggestion: found = %d, tree = %d, len = %d, cr = %d\n",
found, *tree_idx, better_len, ac->ac_criteria);
+ trace_ext4_mb_frsp_optimize(ac, found, cache_more_trees, *tree_idx);
return ret;
}
@@ -1546,6 +1548,11 @@ ext4_mb_tree_allocator(struct ext4_allocation_context *ac)
ac->ac_groups_scanned++;
tree_idx = (j % sbi->s_mb_num_frsp_trees);
+ trace_ext4_mb_tree_allocator(ac, tree_idx,
+ sbi->s_mb_num_frsp_trees,
+ atomic_read(&sbi->s_mb_num_frsp_trees_cached),
+ atomic_read(&sbi->s_mb_num_fragments) *
+ sizeof(struct ext4_frsp_node));
ret = ext4_mb_load_allocator(sb,
tree_idx * ext4_flex_bg_size(sbi), &e4b, 0);
if (ret)
@@ -1655,6 +1662,7 @@ int ext4_mb_init_freespace_trees(struct super_block *sb)
if (sbi->s_mb_frsp_mem_limit < EXT4_MB_FRSP_MEM_MIN(sb))
sbi->s_mb_frsp_mem_limit = EXT4_MB_FRSP_MEM_MIN(sb);
atomic_set(&sbi->s_mb_frsp_lru.frsp_active_fragments, 0);
+ atomic_set(&sbi->s_mb_ac_id, 0);
return 0;
}
@@ -5794,6 +5802,7 @@ ext4_mb_initialize_context(struct ext4_allocation_context *ac,
ext4_get_group_no_and_offset(sb, goal, &group, &block);
/* set up allocation goals */
+ ac->ac_id = atomic_inc_return(&sbi->s_mb_ac_id);
ac->ac_b_ex.fe_logical = EXT4_LBLK_CMASK(sbi, ar->logical);
ac->ac_status = AC_STATUS_CONTINUE;
ac->ac_sb = sb;
@@ -171,7 +171,6 @@ struct ext4_tree_extent {
};
struct ext4_allocation_context {
- __u32 ac_id;
struct inode *ac_inode;
struct super_block *ac_sb;
@@ -206,6 +205,8 @@ struct ext4_allocation_context {
* N > 0, the field stores N, otherwise 0 */
__u8 ac_num_optimizations;
__u8 ac_op; /* operation, for history only */
+	__u32 ac_id;	/* allocation ID for tracing; __u32 so IDs don't wrap/collide at 256 */
+
struct page *ac_bitmap_page;
struct page *ac_buddy_page;
struct ext4_prealloc_space *ac_pa;
@@ -874,6 +874,118 @@ TRACE_EVENT(ext4_allocate_blocks,
__entry->pleft, __entry->pright)
);
+TRACE_EVENT(ext4_mb_frsp_free_tree,
+ TP_PROTO(struct super_block *sb, int tree),
+
+ TP_ARGS(sb, tree),
+
+ TP_STRUCT__entry(
+ __field( dev_t, dev )
+ __field( int, tree )
+ ),
+
+ TP_fast_assign(
+ __entry->dev = sb->s_dev;
+ __entry->tree = tree;
+ ),
+
+ TP_printk("dev %d,%d tree %d",
+ MAJOR(__entry->dev), MINOR(__entry->dev), __entry->tree)
+);
+
+TRACE_EVENT(ext4_mb_frsp_optimize,
+ TP_PROTO(struct ext4_allocation_context *ac, int found,
+ int cache_more_trees, int tree),
+
+ TP_ARGS(ac, found, cache_more_trees, tree),
+
+ TP_STRUCT__entry(
+ __field( dev_t, dev )
+ __field( ino_t, ino )
+ __field( unsigned int, len )
+ __field( unsigned int, flags )
+ __field( int, found )
+ __field( int, tree )
+ __field( int, num_found )
+ __field( int, attempts )
+ __field( int, ac_id )
+ __field( int, ac_criteria )
+ __field( int, ac_groups_scanned )
+ __field( int, cache_more_trees )
+ ),
+
+ TP_fast_assign(
+ __entry->dev = ac->ac_sb->s_dev;
+ __entry->ino = ac->ac_inode->i_ino;
+ __entry->len = ac->ac_b_tree_ex.te_len;
+ __entry->flags = ac->ac_flags;
+ __entry->found = found;
+ __entry->tree = tree;
+ __entry->attempts = ac->ac_num_optimizations;
+ __entry->ac_id = ac->ac_id;
+ __entry->num_found = ac->ac_found;
+ __entry->ac_criteria = ac->ac_criteria;
+ __entry->ac_groups_scanned = ac->ac_groups_scanned;
+ __entry->cache_more_trees = cache_more_trees;
+ ),
+
+ TP_printk("dev %d,%d ino %lu ac %d flags %s best-len %u num-found %d "
+ "suggest-tree %d cache_more %d attempt %d status %d cr %d "
+ "nflexgrps %d",
+ MAJOR(__entry->dev), MINOR(__entry->dev),
+ (unsigned long) __entry->ino, __entry->ac_id,
+ show_mballoc_flags(__entry->flags),
+ __entry->len, __entry->num_found, __entry->tree,
+ __entry->cache_more_trees, __entry->attempts, __entry->found,
+ __entry->ac_criteria, __entry->ac_groups_scanned)
+);
+
+TRACE_EVENT(ext4_mb_tree_allocator,
+	TP_PROTO(struct ext4_allocation_context *ac, int tree, int num_trees,
+		int num_trees_loaded, unsigned long bytes_usage),
+
+	TP_ARGS(ac, tree, num_trees, num_trees_loaded, bytes_usage),
+
+	TP_STRUCT__entry(
+		__field(	dev_t,	dev			)
+		__field(	ino_t,	ino			)
+		__field(	unsigned int,	len		)
+		__field(	unsigned int,	flags		)
+		__field(	int,	num_trees		)
+		__field(	int,	num_trees_loaded	)
+		__field(	int,	tree			)
+		__field(	int,	ac_id			)
+		__field(	int,	ac_found		)
+		__field(	int,	ac_criteria		)
+		__field(	int,	ac_groups_scanned	)
+		__field(	unsigned long,	bytes_usage	)
+	),
+
+	TP_fast_assign(
+		__entry->dev			= ac->ac_sb->s_dev;
+		__entry->ino			= ac->ac_inode->i_ino;
+		__entry->len			= ac->ac_b_tree_ex.te_len;
+		__entry->flags			= ac->ac_flags;
+		__entry->num_trees		= num_trees;
+		__entry->num_trees_loaded	= num_trees_loaded;
+		__entry->tree			= tree;
+		__entry->ac_id			= ac->ac_id;
+		__entry->ac_found		= ac->ac_found;
+		__entry->ac_criteria		= ac->ac_criteria;
+		__entry->ac_groups_scanned	= ac->ac_groups_scanned;
+		__entry->bytes_usage		= bytes_usage;
+	),
+
+	TP_printk("dev %d,%d ino %lu ac %d flags %s best-len %u found %d current-tree %d "
+		  "num-trees %d num-trees-loaded %d cr %d nflexgrps %d mem_bytes %lu",
+		  MAJOR(__entry->dev), MINOR(__entry->dev),
+		  (unsigned long) __entry->ino, __entry->ac_id,
+		  show_mballoc_flags(__entry->flags),
+		  __entry->len, __entry->ac_found, __entry->tree, __entry->num_trees,
+		  __entry->num_trees_loaded, __entry->ac_criteria,
+		  __entry->ac_groups_scanned, __entry->bytes_usage)
+);
+
TRACE_EVENT(ext4_free_blocks,
TP_PROTO(struct inode *inode, __u64 block, unsigned long count,
int flags),