Skip to content

Expose attention center when doing attention-based cropping. #3164

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 4 commits into from
Nov 28, 2022
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions ChangeLog
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
master

- expose location of interest when using attention based cropping [ejoebstl]
- version bump to 8.14
- remove autotools
- remove various obsolete scripts
Expand Down
51 changes: 43 additions & 8 deletions libvips/conversion/smartcrop.c
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,8 @@
* - add low and high
* 19/3/20 jcupitt
* - add all
* 26/11/22 ejoebstl
* - expose location of interest when using attention based cropping
*/

/*
Expand Down Expand Up @@ -71,6 +73,9 @@ typedef struct _VipsSmartcrop {
int height;
VipsInteresting interesting;

int attention_x;
int attention_y;

} VipsSmartcrop;

typedef VipsConversionClass VipsSmartcropClass;
Expand Down Expand Up @@ -195,7 +200,7 @@ pythagoras( VipsSmartcrop *smartcrop, VipsImage *in, VipsImage **out )

static int
vips_smartcrop_attention( VipsSmartcrop *smartcrop,
VipsImage *in, int *left, int *top )
VipsImage *in, int *left, int *top, int *attention_x, int *attention_y)
{
/* From smartcrop.js.
*/
Expand Down Expand Up @@ -290,15 +295,20 @@ vips_smartcrop_attention( VipsSmartcrop *smartcrop,
if( vips_sum( &t[14], &t[18], 3, NULL ) ||
vips_gaussblur( t[18], &t[19], sigma, NULL ) ||
vips_max( t[19], &max, "x", &x_pos, "y", &y_pos, NULL ) )
return( -1 );
return( -1 );

/* Transform back into image coordinates.
*/
*attention_x = x_pos / hscale;
*attention_y = y_pos / vscale;

/* Centre the crop over the max.
*/
*left = VIPS_CLIP( 0,
x_pos / hscale - smartcrop->width / 2,
*attention_x - smartcrop->width / 2,
in->Xsize - smartcrop->width );
*top = VIPS_CLIP( 0,
y_pos / vscale - smartcrop->height / 2,
*attention_y - smartcrop->height / 2,
in->Ysize - smartcrop->height );

return( 0 );
Expand All @@ -316,6 +326,9 @@ vips_smartcrop_build( VipsObject *object )
int left;
int top;

int attention_x = 0;
int attention_y = 0;

if( VIPS_OBJECT_CLASS( vips_smartcrop_parent_class )->
build( object ) )
return( -1 );
Expand Down Expand Up @@ -357,7 +370,7 @@ vips_smartcrop_build( VipsObject *object )
break;

case VIPS_INTERESTING_ATTENTION:
if( vips_smartcrop_attention( smartcrop, in, &left, &top ) )
if( vips_smartcrop_attention( smartcrop, in, &left, &top, &attention_x, &attention_y ) )
return( -1 );
break;

Expand All @@ -383,6 +396,11 @@ vips_smartcrop_build( VipsObject *object )
break;
}

g_object_set(smartcrop,
"attention_x", attention_x,
"attention_y", attention_y,
NULL);

if( vips_extract_area( smartcrop->in, &t[1],
left, top,
smartcrop->width, smartcrop->height, NULL ) ||
Expand Down Expand Up @@ -420,7 +438,7 @@ vips_smartcrop_class_init( VipsSmartcropClass *class )
G_STRUCT_OFFSET( VipsSmartcrop, width ),
1, VIPS_MAX_COORD, 1 );

VIPS_ARG_INT( class, "height", 5,
VIPS_ARG_INT( class, "height", 5,
_( "Height" ),
_( "Height of extract area" ),
VIPS_ARGUMENT_REQUIRED_INPUT,
Expand All @@ -434,6 +452,21 @@ vips_smartcrop_class_init( VipsSmartcropClass *class )
G_STRUCT_OFFSET( VipsSmartcrop, interesting ),
VIPS_TYPE_INTERESTING, VIPS_INTERESTING_ATTENTION );

VIPS_ARG_INT( class, "attention_x", 2,
_( "Attention x" ),
_( "Horizontal position of attention centre" ),
VIPS_ARGUMENT_OPTIONAL_OUTPUT,
G_STRUCT_OFFSET( VipsSmartcrop, attention_x ),
0, VIPS_MAX_COORD, 0 );

VIPS_ARG_INT( class, "attention_y", 3,
_( "Attention y" ),
_( "Vertical position of attention centre" ),
VIPS_ARGUMENT_OPTIONAL_OUTPUT,
G_STRUCT_OFFSET( VipsSmartcrop, attention_y ),
0, VIPS_MAX_COORD, 0 );


}

static void
Expand All @@ -451,9 +484,11 @@ vips_smartcrop_init( VipsSmartcrop *smartcrop )
* @...: %NULL-terminated list of optional named arguments
*
* Optional arguments:
*
*
* * @interesting: #VipsInteresting to use to find interesting areas (default: #VIPS_INTERESTING_ATTENTION)
*
* * @attention_x: %gint, horizontal position of attention centre when using attention based cropping
* * @attention_y: %gint, vertical position of attention centre when using attention based cropping
*
* Crop an image down to a specified width and height by removing boring parts.
*
* Use @interesting to pick the method vips uses to decide which bits of the
Expand Down
13 changes: 13 additions & 0 deletions test/test-suite/test_conversion.py
Original file line number Diff line number Diff line change
Expand Up @@ -334,6 +334,19 @@ def test_smartcrop(self):
assert test.width == 100
assert test.height == 100

@pytest.mark.skipif(pyvips.type_find("VipsOperation", "smartcrop") == 0,
                    reason="no smartcrop, skipping test")
def test_smartcrop_attention(self):
    # Ask smartcrop for the optional attention-centre outputs as well as
    # the cropped image; pyvips returns (image, opts-dict) in that case.
    cropped, opts = self.image.smartcrop(
        100, 100,
        interesting=pyvips.enums.Interesting.ATTENTION,
        attention_x=True, attention_y=True)

    # The crop must come back at exactly the requested geometry.
    assert cropped.width == 100
    assert cropped.height == 100

    # Known attention centre for the reference test image.
    assert opts["attention_x"] == 199
    assert opts["attention_y"] == 234

def test_falsecolour(self):
for fmt in all_formats:
test = self.colour.cast(fmt)
Expand Down