|
26 | 26 | },
|
27 | 27 | "outputs": [],
|
28 | 28 | "source": [
|
29 |
| - "# Authors: Emmanuelle Gouillart <emmanuelle.gouillart@normalesup.org>\n# Gael Varoquaux <gael.varoquaux@normalesup.org>\n# License: BSD 3 clause\n\nimport numpy as np\nimport matplotlib.pyplot as plt\n\nfrom sklearn.feature_extraction import image\nfrom sklearn.cluster import spectral_clustering\n\nl = 100\nx, y = np.indices((l, l))\n\ncenter1 = (28, 24)\ncenter2 = (40, 50)\ncenter3 = (67, 58)\ncenter4 = (24, 70)\n\nradius1, radius2, radius3, radius4 = 16, 14, 15, 14\n\ncircle1 = (x - center1[0]) ** 2 + (y - center1[1]) ** 2 < radius1**2\ncircle2 = (x - center2[0]) ** 2 + (y - center2[1]) ** 2 < radius2**2\ncircle3 = (x - center3[0]) ** 2 + (y - center3[1]) ** 2 < radius3**2\ncircle4 = (x - center4[0]) ** 2 + (y - center4[1]) ** 2 < radius4**2\n\n# #############################################################################\n# 4 circles\nimg = circle1 + circle2 + circle3 + circle4\n\n# We use a mask that limits to the foreground: the problem that we are\n# interested in here is not separating the objects from the background,\n# but separating them one from the other.\nmask = img.astype(bool)\n\nimg = img.astype(float)\nimg += 1 + 0.2 * np.random.randn(*img.shape)\n\n# Convert the image into a graph with the value of the gradient on the\n# edges.\ngraph = image.img_to_graph(img, mask=mask)\n\n# Take a decreasing function of the gradient: we take it weakly\n# dependent from the gradient the segmentation is close to a voronoi\ngraph.data = np.exp(-graph.data / graph.data.std())\n\n# Force the solver to be arpack, since amg is numerically\n# unstable on this example\nlabels = spectral_clustering(graph, n_clusters=4, eigen_solver=\"arpack\")\nlabel_im = np.full(mask.shape, -1.0)\nlabel_im[mask] = labels\n\nplt.matshow(img)\nplt.matshow(label_im)\n\n# #############################################################################\n# 2 circles\nimg = circle1 + circle2\nmask = img.astype(bool)\nimg = img.astype(float)\n\nimg += 1 + 0.2 * np.random.randn(*img.shape)\n\ngraph = image.img_to_graph(img, mask=mask)\ngraph.data = np.exp(-graph.data / graph.data.std())\n\nlabels = spectral_clustering(graph, n_clusters=2, eigen_solver=\"arpack\")\nlabel_im = np.full(mask.shape, -1.0)\nlabel_im[mask] = labels\n\nplt.matshow(img)\nplt.matshow(label_im)\n\nplt.show()" |
| 29 | + "# Authors: Emmanuelle Gouillart <emmanuelle.gouillart@normalesup.org>\n# Gael Varoquaux <gael.varoquaux@normalesup.org>\n# License: BSD 3 clause" |
| 30 | + ] |
| 31 | + }, |
| 32 | + { |
| 33 | + "cell_type": "markdown", |
| 34 | + "metadata": {}, |
| 35 | + "source": [ |
| 36 | + "## Generate the data\n\n" |
| 37 | + ] |
| 38 | + }, |
| 39 | + { |
| 40 | + "cell_type": "code", |
| 41 | + "execution_count": null, |
| 42 | + "metadata": { |
| 43 | + "collapsed": false |
| 44 | + }, |
| 45 | + "outputs": [], |
| 46 | + "source": [ |
| 47 | + "import numpy as np\n\nl = 100\nx, y = np.indices((l, l))\n\ncenter1 = (28, 24)\ncenter2 = (40, 50)\ncenter3 = (67, 58)\ncenter4 = (24, 70)\n\nradius1, radius2, radius3, radius4 = 16, 14, 15, 14\n\ncircle1 = (x - center1[0]) ** 2 + (y - center1[1]) ** 2 < radius1**2\ncircle2 = (x - center2[0]) ** 2 + (y - center2[1]) ** 2 < radius2**2\ncircle3 = (x - center3[0]) ** 2 + (y - center3[1]) ** 2 < radius3**2\ncircle4 = (x - center4[0]) ** 2 + (y - center4[1]) ** 2 < radius4**2" |
| 48 | + ] |
| 49 | + }, |
| 50 | + { |
| 51 | + "cell_type": "markdown", |
| 52 | + "metadata": {}, |
| 53 | + "source": [ |
| 54 | + "## Plotting four circles\n\n" |
| 55 | + ] |
| 56 | + }, |
| 57 | + { |
| 58 | + "cell_type": "code", |
| 59 | + "execution_count": null, |
| 60 | + "metadata": { |
| 61 | + "collapsed": false |
| 62 | + }, |
| 63 | + "outputs": [], |
| 64 | + "source": [ |
| 65 | + "img = circle1 + circle2 + circle3 + circle4\n\n# We use a mask that limits to the foreground: the problem that we are\n# interested in here is not separating the objects from the background,\n# but separating them one from the other.\nmask = img.astype(bool)\n\nimg = img.astype(float)\nimg += 1 + 0.2 * np.random.randn(*img.shape)" |
| 66 | + ] |
| 67 | + }, |
| 68 | + { |
| 69 | + "cell_type": "markdown", |
| 70 | + "metadata": {}, |
| 71 | + "source": [ |
| 72 | + "Convert the image into a graph with the value of the gradient on the\nedges.\n\n" |
| 73 | + ] |
| 74 | + }, |
| 75 | + { |
| 76 | + "cell_type": "code", |
| 77 | + "execution_count": null, |
| 78 | + "metadata": { |
| 79 | + "collapsed": false |
| 80 | + }, |
| 81 | + "outputs": [], |
| 82 | + "source": [ |
| 83 | + "from sklearn.feature_extraction import image\n\ngraph = image.img_to_graph(img, mask=mask)" |
| 84 | + ] |
| 85 | + }, |
| 86 | + { |
| 87 | + "cell_type": "markdown", |
| 88 | + "metadata": {}, |
| 89 | + "source": [ |
| 90 | + "Take a decreasing function of the gradient resulting in a segmentation\nthat is close to a Voronoi partition\n\n" |
| 91 | + ] |
| 92 | + }, |
| 93 | + { |
| 94 | + "cell_type": "code", |
| 95 | + "execution_count": null, |
| 96 | + "metadata": { |
| 97 | + "collapsed": false |
| 98 | + }, |
| 99 | + "outputs": [], |
| 100 | + "source": [ |
| 101 | + "graph.data = np.exp(-graph.data / graph.data.std())" |
| 102 | + ] |
| 103 | + }, |
| 104 | + { |
| 105 | + "cell_type": "markdown", |
| 106 | + "metadata": {}, |
| 107 | + "source": [ |
| 108 | + "Here we perform spectral clustering using the arpack solver since amg is\nnumerically unstable on this example. We then plot the results.\n\n" |
| 109 | + ] |
| 110 | + }, |
| 111 | + { |
| 112 | + "cell_type": "code", |
| 113 | + "execution_count": null, |
| 114 | + "metadata": { |
| 115 | + "collapsed": false |
| 116 | + }, |
| 117 | + "outputs": [], |
| 118 | + "source": [ |
| 119 | + "from sklearn.cluster import spectral_clustering\nimport matplotlib.pyplot as plt\n\nlabels = spectral_clustering(graph, n_clusters=4, eigen_solver=\"arpack\")\nlabel_im = np.full(mask.shape, -1.0)\nlabel_im[mask] = labels\n\nfig, axs = plt.subplots(nrows=1, ncols=2, figsize=(10, 5))\naxs[0].matshow(img)\naxs[1].matshow(label_im)\n\nplt.show()" |
| 120 | + ] |
| 121 | + }, |
| 122 | + { |
| 123 | + "cell_type": "markdown", |
| 124 | + "metadata": {}, |
| 125 | + "source": [ |
| 126 | + "## Plotting two circles\nHere we repeat the above process but only consider the first two circles\nwe generated. Note that this results in a cleaner separation between the\ncircles as the region sizes are easier to balance in this case.\n\n" |
| 127 | + ] |
| 128 | + }, |
| 129 | + { |
| 130 | + "cell_type": "code", |
| 131 | + "execution_count": null, |
| 132 | + "metadata": { |
| 133 | + "collapsed": false |
| 134 | + }, |
| 135 | + "outputs": [], |
| 136 | + "source": [ |
| 137 | + "img = circle1 + circle2\nmask = img.astype(bool)\nimg = img.astype(float)\n\nimg += 1 + 0.2 * np.random.randn(*img.shape)\n\ngraph = image.img_to_graph(img, mask=mask)\ngraph.data = np.exp(-graph.data / graph.data.std())\n\nlabels = spectral_clustering(graph, n_clusters=2, eigen_solver=\"arpack\")\nlabel_im = np.full(mask.shape, -1.0)\nlabel_im[mask] = labels\n\nfig, axs = plt.subplots(nrows=1, ncols=2, figsize=(10, 5))\naxs[0].matshow(img)\naxs[1].matshow(label_im)\n\nplt.show()" |
30 | 138 | ]
|
31 | 139 | }
|
32 | 140 | ],
|
|