|
1 | | -#!/usr/bin/python |
2 | | - |
3 | 1 | """ |
4 | | -Adapted from the original C++ example: densecrf/examples/dense_inference.cpp |
5 | | -http://www.philkr.net/home/densecrf Version 2.2 |
| 2 | +Adapted from inference.py to demonstrate the usage of the util functions. |
6 | 3 | """ |
7 | 4 |
|
| 5 | +import sys |
8 | 6 | import numpy as np |
9 | | -import cv2 |
10 | 7 | import pydensecrf.densecrf as dcrf |
11 | | -from skimage.segmentation import relabel_sequential |
12 | | -import sys |
| 8 | + |
| 9 | +# Get im{read,write} from somewhere. |
| 10 | +try: |
| 11 | + from cv2 import imread, imwrite |
| 12 | +except ImportError: |
| 13 | + # Note that, sadly, skimage unconditionally imports scipy and matplotlib, |
| 14 | + # so you'll need them if you don't have OpenCV. But you probably have them. |
| 15 | + from skimage.io import imread, imsave |
| 16 | + imwrite = imsave |
| 17 | + # TODO: Use scipy instead. |
| 18 | + |
| 19 | +from pydensecrf.utils import compute_unary, create_pairwise_bilateral, create_pairwise_gaussian |
13 | 20 |
|
14 | 21 | if len(sys.argv) != 4: |
15 | 22 | print("Usage: python {} IMAGE ANNO OUTPUT".format(sys.argv[0])) |
16 | 23 | print("") |
17 | 24 | print("IMAGE and ANNO are inputs and OUTPUT is where the result should be written.") |
18 | 25 | sys.exit(1) |
19 | 26 |
|
20 | | -img = cv2.imread(sys.argv[1], 1) |
21 | | -labels = relabel_sequential(cv2.imread(sys.argv[2], 0))[0].flatten() |
22 | | -output = sys.argv[3] |
| 27 | +fn_im = sys.argv[1] |
| 28 | +fn_anno = sys.argv[2] |
| 29 | +fn_output = sys.argv[3] |
| 30 | + |
| 31 | +################################## |
| 32 | +### Read images and annotation ### |
| 33 | +################################## |
| 34 | +img = imread(fn_im) |
| 35 | + |
| 36 | +# Convert the annotation's RGB color to a single 32-bit integer color 0xBBGGRR |
| 37 | +anno_rgb = imread(fn_anno).astype(np.uint32) |
| 38 | +anno_lbl = anno_rgb[:,:,0] + (anno_rgb[:,:,1] << 8) + (anno_rgb[:,:,2] << 16) |
| 39 | + |
| 40 | +# Convert the 32-bit integer colors to labels 1, 2, ... |
| 41 | +# Note that all-black, i.e. the value 0 for the background, will stay 0. |
| 42 | +colors, labels = np.unique(anno_lbl, return_inverse=True) |
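| | +# labels now holds, for each pixel, the index of its color in the colors array. |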
| 43 | + |
| 44 | +# And create a mapping back from the labels to 32-bit integer colors. |
| 45 | +# But remove the all-0 black, which won't exist in the MAP! |
| 46 | +colors = colors[1:] |
| 47 | +colorize = np.empty((len(colors), 3), np.uint8) |
| 48 | +colorize[:,0] = (colors & 0x0000FF) |
| 49 | +colorize[:,1] = (colors & 0x00FF00) >> 8 |
| 50 | +colorize[:,2] = (colors & 0xFF0000) >> 16 |
| 51 | + |
| 52 | +# Compute the number of classes in the label image. |
| 53 | +# We subtract one because the number shouldn't include the value 0 which stands |
| 54 | +# for "unknown" or "unsure". |
| 55 | +M = len(set(labels.flat)) - 1 |
| 56 | +print(M, " labels and \"unknown\" 0: ", set(labels.flat)) |
| 57 | + |
| 58 | +########################### |
| 59 | +### Setup the CRF model ### |
| 60 | +########################### |
| 61 | +use_2d = False |
| 62 | +# use_2d = True |
| 63 | +if use_2d: |
| 64 | + print("Using 2D specialized functions") |
| 65 | + |
| 66 | + # Example using the DenseCRF2D code |
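| | + # (the constructor takes width, height and the number of labels) |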
| 67 | + d = dcrf.DenseCRF2D(img.shape[1], img.shape[0], M) |
| 68 | + |
| 69 | + # get unary potentials (neg log probability) |
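| | + # GT_PROB is the confidence in the annotation: the annotated label gets |
| | + # probability 0.7, the rest is spread evenly over the other labels. |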
| 70 | + U = compute_unary(labels, M, GT_PROB=0.7) |
| 71 | + d.setUnaryEnergy(U) |
| 72 | + |
| 73 | + # This adds the color-independent term; features are the locations only. |
| 74 | + d.addPairwiseGaussian(sxy=(3, 3), compat=3, kernel=dcrf.DIAG_KERNEL, |
| 75 | + normalization=dcrf.NORMALIZE_SYMMETRIC) |
| 76 | + |
| 77 | + # This adds the color-dependent term, i.e. features are (x,y,r,g,b). |
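| | + # sxy and srgb are the kernel widths over position and color, and compat |
| | + # is the weight (label compatibility) of this pairwise term. |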
| 78 | + d.addPairwiseBilateral(sxy=(80, 80), srgb=(13, 13, 13), rgbim=img, |
| 79 | + compat=10, |
| 80 | + kernel=dcrf.DIAG_KERNEL, |
| 81 | + normalization=dcrf.NORMALIZE_SYMMETRIC) |
| 82 | +else: |
| 83 | + print("Using generic 2D functions") |
| 84 | + |
| 85 | + # Example using the DenseCRF class and the util functions |
| 86 | + d = dcrf.DenseCRF(img.shape[1] * img.shape[0], M) |
| 87 | + |
| 88 | + # get unary potentials (neg log probability) |
| 89 | + U = compute_unary(labels, M, GT_PROB=0.7) |
| 90 | + d.setUnaryEnergy(U) |
23 | 91 |
|
24 | | -M = labels.max() + 1 # number of labels |
| 92 | + # This creates the color-independent features and then adds them to the CRF. |
| 93 | + feats = create_pairwise_gaussian(sdims=(3, 3), shape=img.shape[:2]) |
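| | + # feats is an array of shape (n_feature_dims, n_pixels); here the two |
| | + # feature dimensions are the (scaled) x and y coordinates. |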
| 94 | + d.addPairwiseEnergy(feats, compat=3, |
| 95 | + kernel=dcrf.DIAG_KERNEL, |
| 96 | + normalization=dcrf.NORMALIZE_SYMMETRIC) |
25 | 97 |
|
26 | | -# Setup the CRF model |
27 | | -d = dcrf.DenseCRF2D(img.shape[1], img.shape[0], M) |
| 98 | + # This creates the color-dependent features and then adds them to the CRF. |
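| | + # chdim=2 tells it that the color channels are in axis 2 of img. |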
| 99 | + feats = create_pairwise_bilateral(sdims=(80, 80), schan=(13, 13, 13), |
| 100 | + img=img, chdim=2) |
| 101 | + d.addPairwiseEnergy(feats, compat=10, |
| 102 | + kernel=dcrf.DIAG_KERNEL, |
| 103 | + normalization=dcrf.NORMALIZE_SYMMETRIC) |
28 | 104 |
|
29 | | -# Certainty that the ground truth is correct |
30 | | -GT_PROB = 0.5 |
31 | 105 |
|
32 | | -# Simple classifier that is 50% certain that the annotation is correct |
33 | | -u_energy = -np.log(1.0 / M) |
34 | | -n_energy = -np.log((1.0 - GT_PROB) / (M - 1)) |
35 | | -p_energy = -np.log(GT_PROB) |
| 106 | +#################################### |
| 107 | +### Do inference and compute MAP ### |
| 108 | +#################################### |
36 | 109 |
|
37 | | -U = np.zeros((M, img.shape[0] * img.shape[1]), dtype='float32') |
38 | | -U[:, labels > 0] = n_energy |
39 | | -U[labels, np.arange(U.shape[1])] = p_energy |
40 | | -U[:, labels == 0] = u_energy |
41 | | -d.setUnaryEnergy(U) |
| 110 | +# Run five inference steps. |
| 111 | +Q = d.inference(5) |
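| | +# Q contains the approximate marginal distributions: one row per class, |
| | +# one column per pixel. |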
42 | 112 |
|
43 | | -d.addPairwiseGaussian(sxy=3, compat=3) |
44 | | -d.addPairwiseBilateral(sxy=80, srgb=13, rgbim=img, compat=10) |
| 113 | +# Find out the most probable class for each pixel. |
| 114 | +MAP = np.argmax(Q, axis=0) |
45 | 115 |
|
46 | | -# Do the inference |
47 | | -res = np.argmax(d.inference(5), axis=0).astype('float32') |
| 116 | +# Convert the MAP (labels) back to the corresponding colors and save the image. |
| 117 | +MAP = colorize[MAP,:] |
| 118 | +imwrite(fn_output, MAP.reshape(img.shape)) |
48 | 119 |
|
49 | | -res *= 255 / res.max() |
50 | | -res = res.reshape(img.shape[:2]) |
51 | | -cv2.imwrite(output, res.astype('uint8')) |
| 120 | +# Manually run a few inference iterations and monitor the KL-divergence. |
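| | +# It should (roughly) decrease from step to step as the mean-field |
| | +# approximation converges. |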
| 121 | +Q, tmp1, tmp2 = d.startInference() |
| 122 | +for i in range(5): |
| 123 | + print("KL-divergence at {}: {}".format(i, d.klDivergence(Q))) |
| 124 | + d.stepInference(Q, tmp1, tmp2) |