## Abstract

We design an optimum receiver to detect a pattern or a reference signal. We design a receiver that detects the signal distorted by a multiplicative noise on the signal itself, as well as by additive noise and by nonoverlapping scene noise. We design the optimum receiver under the condition in which the statistics of the multiplicative and nonoverlapping scene noise are not available. In the case in which additive noise is present and the statistics of the multiplicative noise are not known, the usual method of maximizing the likelihood function to estimate the statistics of stationary noise fails. We overcome this problem by viewing the noise processes as vector random variables and describe two different schemes to estimate the statistics of the multiplicative noise. Using computer simulations we show that, for the images tested here, the optimum receiver performs better than some of the existing receivers.

© 1998 Optical Society of America

Full Article |

PDF Article
### Equations (48)

Equations on this page are rendered with MathJax. Learn more.

(1)
$${H}_{j}:\hspace{1em}s(t)={n}_{r}(t)r(t-{t}_{j})w(t-{t}_{j})+{n}_{b}(t)[1-w(t-{t}_{j})]+{n}_{d}(t).$$
(2)
$$s(i)={n}_{r}(i)r(i-j)w(i-j)+{n}_{b}(i)[1-w(i-j)]+{n}_{d}(i).$$
(4)
$$P(s|{H}_{j})=\frac{1}{(2\pi )^{m/2}({\sigma}_{b}^{2}+{\sigma}_{d}^{2})^{{n}_{o}/2}{\sigma}_{r}^{{n}_{w}}}\times \prod _{i:w(i-j)=1}[{r}^{2}(i-j)+({\sigma}_{d}/{\sigma}_{r})^{2}]^{-1/2}\times \exp\left\{-\sum _{i=1}^{m}\frac{[s(i)-{m}_{b}-{m}_{d}]^{2}[1-w(i-j)]}{2[{\sigma}_{b}^{2}+{\sigma}_{d}^{2}]}\right\}\times \exp\left\{-\sum _{i=1}^{m}\frac{[s(i)-r(i-j){m}_{r}-{m}_{d}]^{2}w(i-j)}{2[{r}^{2}(i-j){\sigma}_{r}^{2}+{\sigma}_{d}^{2}]}\right\},$$
(5)
$$C=\frac{1}{(2\pi )^{m/2}({\sigma}_{b}^{2}+{\sigma}_{d}^{2})^{{n}_{o}/2}{\sigma}_{r}^{{n}_{w}}}\times \prod _{i:w(i-j)=1}[{r}^{2}(i-j)+({\sigma}_{d}/{\sigma}_{r})^{2}]^{-1/2}.$$
(6)
$${\lambda}_{j}=\sum _{i=1}^{m}\left\{\frac{[s(i)-{m}_{b}-{m}_{d}]^{2}[1-w(i-j)]}{{\sigma}_{b}^{2}+{\sigma}_{d}^{2}}+\frac{[s(i)-r(i-j){m}_{r}-{m}_{d}]^{2}w(i-j)}{{r}^{2}(i-j){\sigma}_{r}^{2}+{\sigma}_{d}^{2}}\right\}.$$
(7)
$$s(i)=r(i-j)w(i-j){n}_{r}(i)+{n}_{d}(i).$$
(8)
$$\hat{X}=S=[s(1),\dots ,s({n}_{w})],$$
(9)
$$E(X)=[r(1-j)w(1-j){m}_{r}+{m}_{d},\dots ,r({n}_{w}-j)w({n}_{w}-j){m}_{r}+{m}_{d}],$$
(10)
$${\hat{m}}_{r}(j)=\frac{{\displaystyle \sum _{i=1}^{m}}[s(i)-{m}_{d}]w(i-j)r(i-j)}{\Vert r{\Vert}_{2}^{2}},$$
(11)
$$\Vert r{\Vert}_{2}={\left[\sum _{i=1}^{m}|r(i-j)w(i-j)|^{2}\right]}^{1/2}.$$
(12)
$${\overline{x}}_{i}=r(i-j)w(i-j){\hat{m}}_{r}(j)+{m}_{d}.$$
(13)
$$\widehat{\mathrm{Var}}({x}_{i})=({s}_{i}-{\overline{x}}_{i})^{2}.$$
(14)
$$\mathrm{Var}({x}_{i})={r}^{2}(i-j)w(i-j){\sigma}_{r}^{2}+{\sigma}_{d}^{2}.$$
(15)
$${\hat{\sigma}}_{r}^{2}(j)=\frac{{\displaystyle \sum _{i=1}^{m}}\{[s(i)-{m}_{d}-r(i-j){\hat{m}}_{r}(j)]^{2}-{\sigma}_{d}^{2}\}w(i-j){r}^{2}(i-j)}{\Vert r{\Vert}_{4}^{4}},$$
(16)
$$\Vert r{\Vert}_{4}={\left[\sum _{i=1}^{m}|r(i-j)|^{4}w(i-j)\right]}^{1/4}.$$
(17)
$${\hat{m}}_{b}(j)=\frac{1}{{n}_{o}}\sum _{i=1}^{m}[s(i)-{m}_{d}][1-w(i-j)],$$
(18)
$${\hat{\sigma}}_{b}^{2}(j)=\frac{1}{{n}_{o}}\sum _{i=1}^{m}\{[s(i)-{m}_{d}-{\hat{m}}_{b}(j)]^{2}-{\sigma}_{d}^{2}\}\times [1-w(i-j)].$$
(19)
$${\hat{\sigma}}_{r}^{\prime 2}(j)=\frac{{\displaystyle \sum _{i=1}^{m}}[s(i)-r(i-j){\hat{m}}_{r}(j)]^{2}w(i-j){r}^{2}(i-j)}{\Vert r{\Vert}_{4}^{4}},$$
(20)
$${\hat{\sigma}}_{b}^{\prime 2}(j)=\frac{1}{{n}_{o}}\sum _{i=1}^{m}[s(i)-{\hat{m}}_{b}(j)]^{2}[1-w(i-j)].$$
(21)
$$E[w(i-j){n}_{r}(i)]=E\left[\frac{s(i)w(i-j)}{r(i-j)}\right]-\frac{{m}_{d}w(i-j)}{r(i-j)},$$
(22)
$${m}_{r}(j)=E\left[\frac{[s(i)-{m}_{d}]w(i-j)}{r(i-j)}\right].$$
(23)
$${\hat{m}}_{r}^{\prime\prime}(j)=\frac{1}{{n}_{w}}\sum _{i=1}^{m}\frac{[s(i)-{m}_{d}]w(i-j)}{r(i-j)},$$
(24)
$$({\hat{\sigma}}_{r}^{\prime\prime})^{2}(j)=(1/{n}_{w})\sum _{i=1}^{m}{\left\{\frac{[s(i)-{m}_{d}]w(i-j)}{r(i-j)}-{\hat{m}}_{r}^{\prime\prime}(j)\right\}}^{2}w(i-j).$$
(25)
$$\log P(s|{H}_{j})=-(1/2)[(m/2)\log(2\pi )+{n}_{o}+{A}_{j}+{B}_{j}+{C}_{j}],$$
(26)
$${A}_{j}={n}_{o}\log[{\hat{\sigma}}_{b}^{\prime 2}(j)],$$
(27)
$${B}_{j}=\sum _{i:w(i-j)=1}\log[{r}^{2}(i-j){\hat{\sigma}}_{r}^{2}(j)+{\sigma}_{d}^{2}],$$
(28)
$${C}_{j}=\sum _{i=1}^{m}\frac{[s(i)-r(i-j){\hat{m}}_{r}(j)-{m}_{d}]^{2}w(i-j)}{{r}^{2}(i-j){\hat{\sigma}}_{r}^{2}(j)+{\sigma}_{d}^{2}}.$$
(29)
$${\mathrm{\lambda}}_{j}={A}_{j}+{B}_{j}+{C}_{j}$$
(30)
$$\log P(s|{H}_{j})=-[(m/2)\log(2\pi )+{K}_{j}+{L}_{j}+{M}_{j}+{N}_{j}],$$
(31)
$${K}_{j}=({n}_{o}/2)\log[{\hat{\sigma}}_{b}^{2}(j)+{\sigma}_{d}^{2}]+{n}_{w}\log[{\hat{\sigma}}_{r}^{\prime\prime}(j)],$$
(32)
$${L}_{j}=(1/2)\sum _{i:w(i-j)=1}\log\left[{r}^{2}(i-j)+\frac{{\sigma}_{d}^{2}}{{\hat{\sigma}}_{r}^{\prime\prime 2}(j)}\right],$$
(33)
$${M}_{j}=\sum _{i=1}^{m}\frac{[s(i)-r(i-j){\hat{m}}_{r}^{\prime\prime}(j)-{m}_{d}]^{2}w(i-j)}{2[{r}^{2}(i-j){\hat{\sigma}}_{r}^{\prime\prime 2}(j)+{\sigma}_{d}^{2}]},$$
(34)
$${N}_{j}=\sum _{i=1}^{m}\frac{[s(i)-{\hat{m}}_{b}(j)-{m}_{d}]^{2}[1-w(i-j)]}{2[{\hat{\sigma}}_{b}^{2}(j)+{\sigma}_{d}^{2}]}=\frac{{n}_{o}{\hat{\sigma}}_{b}^{2}(j)}{2[{\hat{\sigma}}_{b}^{2}(j)+{\sigma}_{d}^{2}]}.$$
(35)
$${\mathrm{\lambda}}_{j}={K}_{j}+{L}_{j}+{M}_{j}+{N}_{j}$$
(36)
$$P(s|{H}_{j})=\frac{C}{[{\hat{\sigma}}_{b}^{\prime}(j)]^{{n}_{o}}[{\hat{\sigma}}_{r}^{\prime\prime}(j)]^{{n}_{w}}},$$
(37)
$${\lambda}_{j}={n}_{w}\log\left\{(1/{n}_{w})\sum _{i=1}^{m}{\left[\frac{s(i)}{r(i-j)}-{\hat{m}}_{r}^{\prime\prime}(j)\right]}^{2}w(i-j)\right\}+{n}_{o}\log\left\{(1/{n}_{o})\sum _{i=1}^{m}[s(i)-{\hat{m}}_{b}^{\prime}(j)]^{2}\times [1-w(i-j)]\right\},$$
(38)
$${\hat{m}}_{r}^{\prime\prime}(j)=\frac{1}{{n}_{w}}\sum _{i=1}^{m}\frac{s(i)w(i-j)}{r(i-j)},$$
(39)
$${\hat{m}}_{b}^{\prime}(j)=\frac{1}{{n}_{o}}\sum _{i=1}^{m}s(i)[1-w(i-j)].$$
(40)
$$s(i)={n}_{r}(i)w(i-j)+{n}_{b}(i)[1-w(i-j)].$$
(41)
$${\lambda}_{j}={n}_{w}\log\left\{(1/{n}_{w})\sum _{i=1}^{m}[s(i)-{\hat{m}}_{r}^{\prime\prime}(j)]^{2}w(i-j)\right\}+{n}_{o}\log\left\{(1/{n}_{o})\sum _{i=1}^{m}[s(i)-{\hat{m}}_{b}^{\prime}(j)]^{2}\times [1-w(i-j)]\right\},$$
(42)
$$H(j):\hspace{1em}s(i)=a\,r(i-j)w(i-j)+{n}_{b}(i)[1-w(i-j)]+{n}_{d}(i).$$
(43)
$${\hat{a}}_{j}=\frac{{\displaystyle \sum _{i=1}^{m}}[s(i)-{m}_{d}]r(i-j)w(i-j)}{{\displaystyle \sum _{i=1}^{m}}{r}^{2}(i-j)w(i-j)},$$
(44)
$${\hat{m}}_{b}(j)=\frac{1}{{n}_{o}}\sum _{i=1}^{m}[s(i)-{m}_{d}][1-w(i-j)],$$
(45)
$${\hat{\sigma}}_{b}^{2}(j)=\frac{1}{{n}_{o}}\sum _{i=1}^{m}[s(i)-{m}_{d}-{\hat{m}}_{b}(j)]^{2}[1-w(i-j)].$$
(46)
$${\lambda}_{j}=\log[{\hat{\sigma}}_{b}^{2}(j)]+(1/{\sigma}_{d}^{2}){E}_{j},$$
(47)
$${E}_{j}={\displaystyle \sum _{i=1}^{m}}[s(i)-{m}_{d}]^{2}w(i-j)-\frac{{\left\{{\displaystyle \sum _{i=1}^{m}}[s(i)-{m}_{d}]r(i-j)w(i-j)\right\}}^{2}}{{\displaystyle \sum _{i=1}^{m}}{r}^{2}(i-j)w(i-j)}.$$
(48)
$${\lambda}_{j}=\sum _{i=1}^{m}\frac{[s(i)-{m}_{b}-{m}_{d}]^{2}[1-w(i-j)]}{{\sigma}_{b}^{2}+{\sigma}_{d}^{2}}+(1/{\sigma}_{d}^{2}){E}_{j}.$$