To prove the main results, we need the following lemmas.
Lemma 3.1
(Wang and Lu [7])
Let \(\{ {{X}_{n}},n\ge1 \}\) be a sequence of ANA random variables. If \(\{ {{f}_{n}},n\ge1 \}\) is a sequence of real nondecreasing (or nonincreasing) functions, then \(\{ {{f}_{n}} ( {{X}_{n}} ),n\ge1 \}\) is still a sequence of ANA random variables.
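In particular, for a fixed constant \(b>0\) the truncation \(x\mapsto -bI ( x< -b )+xI ( \vert x \vert \le b )+bI ( x>b )\) is nondecreasing, so Lemma 3.1 guarantees that truncated (and centered) ANA random variables, such as the variables \({{Y}_{ni}}-E{{Y}_{ni}}\) constructed in the proofs below, again form ANA arrays; this is how the lemma is applied in the proofs of Theorems 2.1 and 2.2.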
From Wang and Lu’s [7] Rosenthal-type inequality for ANA random variables we obtain the following result.
Lemma 3.2
(Wang and Lu [7])
For a positive integer \(\mathrm{N} \ge1\) and \(0\le s<\frac{1}{12}\), let \(\{ {{X}_{n}},n\ge 1 \}\) be a sequence of ANA random variables with \({{\rho }^{-}} ( \mathrm{N} )\le s\), \(E{{X}_{n}}=0\), and \(E{{\vert {{X}_{n}} \vert }^{2}}<\infty\). Then there exists a positive constant \(C=C ( 2,\mathrm{N},s )\) such that, for all \(n\ge1\),
$$ E \Biggl( \max_{1\le j\le n} {{\Biggl\vert \sum _{i=1}^{j}{{{X}_{i}}} \Biggr\vert }^{2}} \Biggr)\le C\sum_{i=1}^{n}{EX_{i}^{2}}. $$
(3.1)
Lemma 3.3
(Adler and Rosalsky [17]; Adler et al. [18])
Suppose that \(\{ {{X}_{ni}},i\ge1,n\ge1 \}\) is an array of random variables stochastically dominated by a random variable X. Then, for all \(q>0\) and \(x>0\),
$$\begin{aligned}& E{{\vert {{X}_{ni}} \vert }^{q}}I \bigl( \vert {{X}_{ni}} \vert \le x \bigr)\le C \bigl( E{{\vert X \vert }^{q}}I \bigl( \vert X \vert \le x \bigr)+{{x}^{q}}P \bigl( \vert X \vert >x \bigr) \bigr), \end{aligned}$$
(3.2)
$$\begin{aligned}& E{{\vert {{X}_{ni}} \vert }^{q}}I \bigl( \vert {{X}_{ni}} \vert >x \bigr)\le CE{{\vert X \vert }^{q}}I \bigl( \vert X \vert >x \bigr). \end{aligned}$$
(3.3)
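Throughout the proofs below, stochastic domination of the array \(\{ {{X}_{ni}} \}\) by X is understood in its usual sense (as in the references for Lemma 3.3): there is a constant \(C>0\) such that
$$ P \bigl( \vert {{X}_{ni}} \vert >x \bigr)\le CP \bigl( \vert X \vert >x \bigr) \quad \text{for all } x>0, i\ge1, n\ge1. $$
This is the inequality used to pass from probabilities involving \({{X}_{ni}}\) to probabilities involving X, for instance, in (3.7).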
Lemma 3.4
(Wu et al. [19])
Let \(\{ {{a}_{ni}},i\ge1,n\ge1 \}\) be an array of real constants satisfying (1.1) for some \(\alpha>0\), and let X be a random variable. Let \({{b}_{n}}={{n}^{1/\alpha}}{{ ( \log n )}^{1/\gamma}}\) for some \(\gamma>0\). If \(p>\max \{ \alpha,\gamma \}\), then
$$ \sum_{n=1}^{\infty}{ \frac{1}{nb_{n}^{p}}\sum_{i=1}^{n}{E{{\vert {{a}_{ni}}X \vert }^{p}}I \bigl( \vert {{a}_{ni}}X \vert \le{{b}_{n}} \bigr)}}\leq \left \{ \textstyle\begin{array}{l@{\quad}l} CE|X|^{\alpha}&\textit{for } \alpha>\gamma, \\ CE|X|^{\alpha}\log(1+|X|)&\textit{for } \alpha=\gamma, \\ CE|X|^{\gamma}&\textit{for } \alpha< \gamma. \end{array}\displaystyle \right . $$
(3.4)
Proof of Theorem 2.1
Without loss of generality, assume that \({{a}_{ni}}\ge0\) (otherwise, we shall use \(a_{ni}^{+}\) and \(a_{ni}^{-}\) instead of \({{a}_{ni}}\), and note that \({{a}_{ni}}=a_{ni}^{+}-a_{ni}^{-}\)). For fixed \(n\ge1\), define
$$\begin{aligned}& {{Y}_{ni}}=-{{b}_{n}}I ( {{a}_{ni}} {{X}_{ni}}< -{{b}_{n}} )+{{a}_{ni}} {{X}_{ni}}I \bigl( \vert {{a}_{ni}} {{X}_{ni}} \vert \le {{b}_{n}} \bigr)+{{b}_{n}}I ( {{a}_{ni}} {{X}_{ni}}>{{b}_{n}} ),\quad i\ge1; \\& {{Z}_{ni}}={{a}_{ni}} {{X}_{ni}}-{{Y}_{ni}}= ( {{a}_{ni}} {{X}_{ni}}+{{b}_{n}} )I ( {{a}_{ni}} {{X}_{ni}}< -{{b}_{n}} )+ ( {{a}_{ni}} {{X}_{ni}}-{{b}_{n}} )I ( {{a}_{ni}} {{X}_{ni}}>{{b}_{n}} ); \\& A=\bigcap_{i=1}^{n}{ ( {{Y}_{ni}}={{a}_{ni}} {{X}_{ni}} )}, \qquad B=\bar{A}=\bigcup_{i=1}^{n}{ ( {{Y}_{ni}}\ne {{a}_{ni}} {{X}_{ni}} )}=\bigcup _{i=1}^{n}{ \bigl( \vert {{a}_{ni}} {{X}_{ni}} \vert >{{b}_{n}} \bigr)}; \\& {{E}_{n}}= \Biggl( \max_{1\le j\le n} \Biggl\vert \sum _{i=1}^{j}{{{a}_{ni}} {{X}_{ni}}} \Biggr\vert >\varepsilon{{b}_{n}} \Biggr). \end{aligned}$$
It is easy to check that for all \(\varepsilon>0\),
$$ {{E}_{n}}={{E}_{n}}A\cup{{E}_{n}}B\subset \Biggl( \max_{1\le j\le n} \Biggl\vert \sum _{i=1}^{j}{{{Y}_{ni}}} \Biggr\vert > \varepsilon{{b}_{n}} \Biggr)\cup \Biggl( \bigcup _{i=1}^{n}{ \bigl( \vert {{a}_{ni}} {{X}_{ni}} \vert >{{b}_{n}} \bigr)} \Biggr), $$
which implies that
$$\begin{aligned} P ( {{E}_{n}} ) \le& P \Biggl(\max_{1\le j\le n} \Biggl\vert \sum_{i=1}^{j}{{{Y}_{ni}}} \Biggr\vert >\varepsilon {{b}_{n}} \Biggr)+P \Biggl( \bigcup _{i=1}^{n}{ \bigl( \vert {{a}_{ni}} {{X}_{ni}} \vert >{{b}_{n}} \bigr)} \Biggr) \\ \le& P \Biggl( \max_{1\le j\le n} \Biggl\vert \sum _{i=1}^{j}{ ( {{Y}_{ni}}-E{{Y}_{ni}} )} \Biggr\vert >\varepsilon{{b}_{n}}-\max_{1\le j\le n} \Biggl\vert \sum_{i=1}^{j}{E{{Y}_{ni}}} \Biggr\vert \Biggr) \\ &{}+ \sum_{i=1}^{n}{P \bigl( \vert {{a}_{ni}} {{X}_{ni}} \vert >{{b}_{n}} \bigr)}. \end{aligned}$$
(3.5)
First, we shall show that
$$ \frac{1}{{{b}_{n}}}\max_{1\le j\le n} \Biggl\vert \sum _{i=1}^{j}{E{{Y}_{ni}}} \Biggr\vert \to0\quad \text{as }n\to \infty. $$
(3.6)
For \(0<\alpha\le1\), it follows from (3.2) of Lemma 3.3, the Markov inequality, and \(E{{\vert X \vert }^{\alpha}}<\infty\) that
$$\begin{aligned} \frac{1}{{{b}_{n}}}\max_{1\le j\le n} \Biggl\vert \sum _{i=1}^{j}{E{{Y}_{ni}}} \Biggr\vert \le& C\frac{1}{{{b}_{n}}}\sum_{i=1}^{n}{ \vert E{{Y}_{ni}} \vert } \\ \le& C\frac{1}{{{b}_{n}}}\sum_{i=1}^{n}{E \vert {{a}_{ni}} {{X}_{ni}} \vert I \bigl( \vert {{a}_{ni}} {{X}_{ni}} \vert \le{{b}_{n}} \bigr)}+C\sum_{i=1}^{n}{P \bigl( \vert {{a}_{ni}} {{X}_{ni}} \vert >{{b}_{n}} \bigr)} \\ \le& C\frac{1}{{{b}_{n}}}\sum_{i=1}^{n}{ \bigl( E\vert {{a}_{ni}}X \vert I \bigl( \vert {{a}_{ni}}X \vert \le{{b}_{n}} \bigr)+{{b}_{n}}P \bigl( \vert {{a}_{ni}}X \vert >{{b}_{n}} \bigr) \bigr)} \\ &{}+ C\sum_{i=1}^{n}{P \bigl( \vert {{a}_{ni}}X \vert >{{b}_{n}} \bigr)} \\ \le& C\frac{1}{b_{n}^{\alpha}}\sum_{i=1}^{n}{a_{ni}^{\alpha }E{{ \vert X \vert }^{\alpha}}I \bigl( \vert {{a}_{ni}}X \vert \le {{b}_{n}} \bigr)}+C\frac{1}{b_{n}^{\alpha}}\sum _{i=1}^{n}{a_{ni}^{\alpha}E{{ \vert X \vert }^{\alpha}}} \\ \le& C{{ ( \log n )}^{-\alpha/\gamma}}E{{ \vert X \vert }^{\alpha}}\to0 \quad \text{as }n\to\infty. \end{aligned}$$
(3.7)
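In (3.7) the final inequality combines condition (1.1), which (as used again in the estimate of \({{I}_{2}}\) below) yields \(\sum_{i=1}^{n}{{{\vert {{a}_{ni}} \vert }^{\alpha}}}=O ( n )\), with the choice \({{b}_{n}}={{n}^{1/\alpha}}{{ ( \log n )}^{1/\gamma}}\):
$$ \frac{1}{b_{n}^{\alpha}}\sum_{i=1}^{n}{a_{ni}^{\alpha}}\le \frac{Cn}{n{{ ( \log n )}^{\alpha/\gamma}}}=C{{ ( \log n )}^{-\alpha/\gamma}}. $$
The same elementary computation is used without further comment in (3.8), (3.17), and (3.18).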
From the definition of \({{Z}_{ni}}={{a}_{ni}}{{X}_{ni}}-{{Y}_{ni}}\) we know that when \({{a}_{ni}}{{X}_{ni}}>{{b}_{n}}\), \(0<{{Z}_{ni}}={{a}_{ni}}{{X}_{ni}}-{{b}_{n}}<{{a}_{ni}}{{X}_{ni}}\), when \({{a}_{ni}}{{X}_{ni}}<-{{b}_{n}}\), \({{a}_{ni}}{{X}_{ni}}<{{Z}_{ni}}={{a}_{ni}}{{X}_{ni}}+{{b}_{n}}<0\), and \({{Z}_{ni}}=0\) otherwise. Hence, \(\vert {{Z}_{ni}} \vert \le\vert {{a}_{ni}}{{X}_{ni}} \vert I ( \vert {{a}_{ni}}{{X}_{ni}} \vert >{{b}_{n}} )\).
For \(1<\alpha\le2\), it follows from \(E{{X}_{ni}}=0\), (3.3) of Lemma 3.3, and \(E{{\vert X \vert }^{\alpha}}<\infty\) again that
$$\begin{aligned} \frac{1}{{{b}_{n}}}\max_{1\le j\le n} \Biggl\vert \sum _{i=1}^{j}{E{{Y}_{ni}}} \Biggr\vert =& \frac{1}{{{b}_{n}}}\max_{1\le j\le n} \Biggl\vert \sum _{i=1}^{j}{E{{Z}_{ni}}} \Biggr\vert \\ \le& C\frac{1}{{{b}_{n}}}\sum_{i=1}^{n}{E \vert {{Z}_{ni}} \vert } \\ \le& C\frac{1}{{{b}_{n}}}\sum_{i=1}^{n}{E \vert {{a}_{ni}} {{X}_{ni}} \vert I \bigl( \vert {{a}_{ni}} {{X}_{ni}} \vert >{{b}_{n}} \bigr)} \\ \le& C\frac{1}{{{b}_{n}}}\sum_{i=1}^{n}{E \vert {{a}_{ni}}X \vert I \bigl( \vert {{a}_{ni}}X \vert >{{b}_{n}} \bigr)} \\ \le& C\frac{1}{b_{n}^{\alpha}}\sum_{i=1}^{n}{a_{ni}^{\alpha }E{{ \vert X \vert }^{\alpha}}I \bigl( \vert {{a}_{ni}}X \vert >{{b}_{n}} \bigr)} \\ \le& C{{ ( \log n )}^{-\alpha/\gamma}}E{{ \vert X \vert }^{\alpha}}\to0 \quad \text{as }n\to\infty. \end{aligned}$$
(3.8)
By (3.7) and (3.8) we immediately obtain (3.6). Hence, for n large enough,
$$ P ( {{E}_{n}} )\le P \Biggl( \max_{1\le j\le n} \Biggl\vert \sum_{i=1}^{j}{ ( {{Y}_{ni}}-E{{Y}_{ni}} )} \Biggr\vert >\frac{\varepsilon{{b}_{n}}}{2} \Biggr)+\sum_{i=1}^{n}{P \bigl( \vert {{a}_{ni}} {{X}_{ni}} \vert >{{b}_{n}} \bigr)}. $$
(3.9)
To prove (2.2), it suffices to show that
$$\begin{aligned}& I\triangleq\sum_{n=1}^{\infty}{ \frac{1}{n}P \Biggl( \max_{1\le j\le n} \Biggl\vert \sum _{i=1}^{j}{ ( {{Y}_{ni}}-E{{Y}_{ni}} )} \Biggr\vert >\frac{\varepsilon{{b}_{n}}}{2} \Biggr)}< \infty, \end{aligned}$$
(3.10)
$$\begin{aligned}& J\triangleq\sum_{n=1}^{\infty}{ \frac{1}{n}\sum_{i=1}^{n}{P \bigl( \vert {{a}_{ni}} {{X}_{ni}} \vert >{{b}_{n}} \bigr)}}< \infty. \end{aligned}$$
(3.11)
By Lemma 3.1, \(\{ {{Y}_{ni}}-E{{Y}_{ni}},i\ge1,n\ge1 \}\) is still an array of rowwise ANA random variables. Hence, it follows from the Markov inequality and Lemma 3.2 that
$$\begin{aligned} I \le& C\sum_{n=1}^{\infty}{ \frac{1}{n}\frac {1}{b_{n}^{2}}E \Biggl(\max_{1\le j\le n} {{\Biggl\vert \sum_{i=1}^{j}{ ( {{Y}_{ni}}-E{{Y}_{ni}} )} \Biggr\vert }^{2}} \Biggr)} \\ \le& C\sum_{n=1}^{\infty}{\frac{1}{n} \frac{1}{b_{n}^{2}}\sum_{i=1}^{n}{E{{\vert {{Y}_{ni}}-E{{Y}_{ni}} \vert }^{2}}}} \\ \le& C\sum_{n=1}^{\infty}{\frac{1}{n} \frac{1}{b_{n}^{2}}\sum_{i=1}^{n}{EY_{ni}^{2}}} \\ \le& C\sum_{n=1}^{\infty}{\frac{1}{n} \frac{1}{b_{n}^{2}}\sum_{i=1}^{n}{E{{\vert {{a}_{ni}} {{X}_{ni}} \vert }^{2}}I \bigl( \vert {{a}_{ni}} {{X}_{ni}} \vert \le{{b}_{n}} \bigr)}}+C\sum_{n=1}^{\infty}{\frac{1}{n} \sum_{i=1}^{n}{P \bigl( \vert {{a}_{ni}} {{X}_{ni}} \vert >{{b}_{n}} \bigr)}} \\ \le& C\sum_{n=1}^{\infty}{\frac{1}{n} \frac{1}{b_{n}^{2}}\sum_{i=1}^{n}{E{{\vert {{a}_{ni}}X \vert }^{2}}I \bigl( \vert {{a}_{ni}}X \vert \le{{b}_{n}} \bigr)}} \\ &{}+C\sum _{n=1}^{\infty }{\frac{1}{n}\frac{1}{b_{n}^{\alpha}}\sum _{i=1}^{n}{E{{\vert {{a}_{ni}}X \vert }^{\alpha}}I \bigl( \vert {{a}_{ni}}X \vert >{{b}_{n}} \bigr)}} \\ \triangleq& {{I}_{1}}+{{I}_{2}}. \end{aligned}$$
(3.12)
From Lemma 3.4 (for \(p=2\)) and (2.1) we obtain that \({{I}_{1}}<\infty \). Next, it follows from (1.1) that
$$\begin{aligned} {{I}_{2}} =&C\sum_{n=1}^{\infty}{ \frac{1}{{{n}^{2}}}{{ ( \log n )}^{-\alpha/\gamma}}\sum_{i=1}^{n}{E{{ \vert {{a}_{ni}}X \vert }^{\alpha}}I \bigl( {{\vert {{a}_{ni}}X \vert }^{\alpha}}>n{{ ( \log n )}^{\alpha/\gamma}} \bigr)}} \\ \le& C\sum_{n=1}^{\infty}{\frac{1}{{{n}^{2}}}{{ ( \log n )}^{-\alpha/\gamma}}\sum_{i=1}^{n}{E{{ \vert {{a}_{ni}}X \vert }^{\alpha}}I \biggl( {{\vert X \vert }^{\alpha}}>\frac{n{{ ( \log n )}^{\alpha/\gamma}}}{\sum_{i=1}^{n}{{{\vert {{a}_{ni}} \vert }^{\alpha}}}} \biggr)}} \\ \le& C\sum_{n=1}^{\infty}{\frac{1}{{{n}^{2}}}{{ ( \log n )}^{-\alpha/\gamma}}\sum_{i=1}^{n}{E{{ \vert {{a}_{ni}}X \vert }^{\alpha}}I \bigl( \vert X \vert >{{ ( \log n )}^{1/\gamma}} \bigr)}} \\ \le& C\sum_{n=1}^{\infty}{\frac{1}{n}{{ ( \log n )}^{-\alpha/\gamma}}E{{ \vert X \vert }^{\alpha}}I \bigl( \vert X \vert >{{ ( \log n )}^{1/\gamma}} \bigr)} \\ =& C\sum_{n=1}^{\infty}{\frac{1}{n}{{ ( \log n )}^{-\alpha/\gamma}}\sum_{k=n}^{\infty}{E{{ \vert X \vert }^{\alpha}}I \bigl( {{ ( \log k )}^{1/\gamma}}< \vert X \vert < {{ \bigl( \log ( k+1 ) \bigr)}^{1/\gamma}} \bigr)}} \\ =& C\sum_{k=1}^{\infty}{E{{ \vert X \vert }^{\alpha}}I \bigl( {{ ( \log k )}^{1/\gamma}}< \vert X \vert < {{ \bigl( \log ( k+1 ) \bigr)}^{1/\gamma}} \bigr)\sum _{n=1}^{k}{\frac{1}{n}{{ ( \log n )}^{-\alpha/\gamma}}}}. \end{aligned}$$
Note that
$$ \sum_{n=1}^{k}{\frac{1}{n}{{ ( \log n )}^{-\alpha /\gamma}}}= \left \{ \textstyle\begin{array}{l@{\quad}l} C&\mbox{for } \alpha>\gamma, \\ C\log\log k&\mbox{for } \alpha=\gamma, \\ C{{ ( \log k )}^{1-\alpha/\gamma}}&\mbox{for } \alpha< \gamma. \end{array}\displaystyle \right . $$
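These three rates follow, for instance, by comparing the sum with the integral
$$ \int_{2}^{k}{\frac{dx}{x{{ ( \log x )}^{\alpha/\gamma}}}}= \left \{ \textstyle\begin{array}{l@{\quad}l} O ( 1 )&\mbox{for } \alpha>\gamma, \\ \log\log k-\log\log2&\mbox{for } \alpha=\gamma, \\ \frac{1}{1-\alpha/\gamma} ( {{ ( \log k )}^{1-\alpha/\gamma}}-{{ ( \log2 )}^{1-\alpha/\gamma}} )&\mbox{for } \alpha< \gamma, \end{array}\displaystyle \right . $$
which is obtained from the substitution \(u=\log x\).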
Therefore, we obtain that
$$ {{I}_{2}}\le \left \{ \textstyle\begin{array}{l@{\quad}l} CE{{\vert X \vert }^{\alpha}}&\mbox{for } \alpha>\gamma, \\ CE{{\vert X \vert }^{\alpha}}\log ( 1+\vert X \vert )&\mbox{for } \alpha=\gamma, \\ CE{{\vert X \vert }^{\gamma}}&\mbox{for } \alpha< \gamma. \end{array}\displaystyle \right . $$
Under the conditions of Theorem 2.1 it follows that \({{I}_{2}}<\infty \).
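For instance, in the case \(\alpha<\gamma\), the bound \({{I}_{2}}\le CE{{\vert X \vert }^{\gamma}}\) follows because on the event \( \{ {{ ( \log k )}^{1/\gamma}}< \vert X \vert \} \) we have \({{ ( \log k )}^{1-\alpha/\gamma}}\le{{\vert X \vert }^{\gamma-\alpha}}\), so that
$$ \sum_{k=1}^{\infty}{E{{ \vert X \vert }^{\alpha}}I \bigl( {{ ( \log k )}^{1/\gamma}}< \vert X \vert < {{ \bigl( \log ( k+1 ) \bigr)}^{1/\gamma}} \bigr){{ ( \log k )}^{1-\alpha/\gamma}}}\le E{{\vert X \vert }^{\gamma}}. $$
The case \(\alpha=\gamma\) is handled in the same way, using \(\log\log k\le\gamma\log ( 1+\vert X \vert )\) on the same event for \(k\ge3\), and the case \(\alpha>\gamma\) is immediate.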
By (3.3) of Lemma 3.3 and the proof of \({{I}_{2}}<\infty\),
$$ J\le\sum_{n=1}^{\infty}{ \frac{1}{n}\frac{1}{b_{n}^{\alpha }}\sum_{i=1}^{n}{E{{ \vert {{a}_{ni}}X \vert }^{\alpha}}I \bigl( \vert {{a}_{ni}}X \vert >{{b}_{n}} \bigr)}}< \infty. $$
(3.13)
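Indeed, by the Markov inequality on the event \( \{ \vert {{a}_{ni}}{{X}_{ni}} \vert >{{b}_{n}} \} \) and then (3.3) of Lemma 3.3 (with \(q=\alpha\)),
$$ P \bigl( \vert {{a}_{ni}} {{X}_{ni}} \vert >{{b}_{n}} \bigr)\le \frac{1}{b_{n}^{\alpha}}E{{\vert {{a}_{ni}} {{X}_{ni}} \vert }^{\alpha}}I \bigl( \vert {{a}_{ni}} {{X}_{ni}} \vert >{{b}_{n}} \bigr)\le\frac{C}{b_{n}^{\alpha}}E{{\vert {{a}_{ni}}X \vert }^{\alpha}}I \bigl( \vert {{a}_{ni}}X \vert >{{b}_{n}} \bigr), $$
and the resulting series is exactly the one shown to be finite in the estimate of \({{I}_{2}}\).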
The proof of Theorem 2.1 is completed. □
Proof of Theorem 2.2
For all \(\varepsilon>0\), we have
$$\begin{aligned}& \sum_{n=1}^{\infty}{ \frac{1}{n}}E \Biggl( \frac {1}{{{b}_{n}}}\max_{1\le j\le n} \Biggl\vert \sum_{i=1}^{j}{{{a}_{ni}} {{X}_{ni}}} \Biggr\vert -\varepsilon \Biggr)_{+}^{q} \\& \quad = \sum_{n=1}^{\infty}{\frac{1}{n} \int_{0}^{\infty}{P \Biggl( \frac{1}{{{b}_{n}}}\max _{1\le j\le n} \Biggl\vert \sum_{i=1}^{j}{{{a}_{ni}} {{X}_{ni}}} \Biggr\vert -\varepsilon >{{t}^{1/q}} \Biggr) \,dt}} \\& \quad = \sum_{n=1}^{\infty}{\frac{1}{n} \int_{0}^{1}{P \Biggl( \frac {1}{{{b}_{n}}}\max _{1\le j\le n} \Biggl\vert \sum_{i=1}^{j}{{{a}_{ni}} {{X}_{ni}}} \Biggr\vert >\varepsilon+{{t}^{1/q}} \Biggr) \,dt}} \\& \qquad {} + \sum_{n=1}^{\infty}{ \frac{1}{n} \int_{1}^{\infty}{P \Biggl( \frac{1}{{{b}_{n}}}\max _{1\le j\le n} \Biggl\vert \sum_{i=1}^{j}{{{a}_{ni}} {{X}_{ni}}} \Biggr\vert >\varepsilon +{{t}^{1/q}} \Biggr) \,dt}} \\& \quad \le \sum_{n=1}^{\infty}{\frac{1}{n}P \Biggl(\max_{1\le j\le n} \Biggl\vert \sum _{i=1}^{j}{{{a}_{ni}} {{X}_{ni}}} \Biggr\vert >\varepsilon{{b}_{n}} \Biggr)} \\& \qquad {} + \sum_{n=1}^{\infty}{ \frac{1}{n} \int_{1}^{\infty}{P \Biggl( \max_{1\le j\le n} \Biggl\vert \sum_{i=1}^{j}{{{a}_{ni}} {{X}_{ni}}} \Biggr\vert >{{b}_{n}} {{t}^{1/q}} \Biggr)\,dt}} \\& \quad \triangleq {{K}_{1}}+{{K}_{2}}. \end{aligned}$$
(3.14)
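The first equality in (3.14) is the standard representation \(EW=\int_{0}^{\infty}{P ( W>t )\,dt}\) for a nonnegative random variable W, applied here to \(W= ( \frac{1}{{{b}_{n}}}\max_{1\le j\le n} \vert \sum_{i=1}^{j}{{{a}_{ni}}{{X}_{ni}}} \vert -\varepsilon )_{+}^{q}\), combined with the observation that \(P ( W>t )=P ( \frac{1}{{{b}_{n}}}\max_{1\le j\le n} \vert \sum_{i=1}^{j}{{{a}_{ni}}{{X}_{ni}}} \vert -\varepsilon>{{t}^{1/q}} )\) for every \(t>0\).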
To prove (2.3), it suffices to prove that \({{K}_{1}}<\infty\) and \({{K}_{2}}<\infty\). By Theorem 2.1 we obtain that \({{K}_{1}}<\infty\). Using notation and methods similar to those in the proof of Theorem 2.1, for fixed \(n\ge1\) and all \(t\ge1\), define, for \(i\ge1\),
$$\begin{aligned}& Y_{ni}'=-{{b}_{n}} {{t}^{1/q}}I \bigl( {{a}_{ni}} {{X}_{ni}}< -{{b}_{n}} {{t}^{1/q}} \bigr)+{{a}_{ni}} {{X}_{ni}}I \bigl( \vert {{a}_{ni}} {{X}_{ni}} \vert \le {{b}_{n}} {{t}^{1/q}} \bigr)+{{b}_{n}} {{t}^{1/q}}I \bigl( {{a}_{ni}} {{X}_{ni}}>{{b}_{n}} {{t}^{1/q}} \bigr); \\& Z_{ni}'={{a}_{ni}} {{X}_{ni}}-Y_{ni}' \\& \hphantom{Z_{ni}'}= \bigl( {{a}_{ni}} {{X}_{ni}}+{{b}_{n}} {{t}^{1/q}} \bigr)I \bigl( {{a}_{ni}} {{X}_{ni}}< -{{b}_{n}} {{t}^{1/q}} \bigr)+ \bigl( {{a}_{ni}} {{X}_{ni}}-{{b}_{n}} {{t}^{1/q}} \bigr)I \bigl( {{a}_{ni}} {{X}_{ni}}>{{b}_{n}} {{t}^{1/q}} \bigr); \\& {{A}'}=\bigcap_{i=1}^{n}{ \bigl( Y_{ni}'={{a}_{ni}} {{X}_{ni}} \bigr)}, \qquad {{B}'}={{\bar {A}}'}=\bigcup _{i=1}^{n}{ \bigl( Y_{ni}'\ne {{a}_{ni}} {{X}_{ni}} \bigr)}=\bigcup _{i=1}^{n}{ \bigl( \vert {{a}_{ni}} {{X}_{ni}} \vert >{{b}_{n}} {{t}^{1/q}} \bigr)}; \\& E_{n}'= \Biggl( \max_{1\le j\le n} \Biggl\vert \sum_{i=1}^{j}{{{a}_{ni}} {{X}_{ni}}} \Biggr\vert >{{b}_{n}} {{t}^{1/q}} \Biggr). \end{aligned}$$
It is easy to check that for all \(t\ge1\),
$$\begin{aligned} P \bigl( E_{n}' \bigr) \le& P \Biggl( \max_{1\le j\le n} \Biggl\vert \sum_{i=1}^{j}{Y_{ni}'} \Biggr\vert >{{b}_{n}} {{t}^{1/q}} \Biggr)+P \Biggl( \bigcup _{i=1}^{n}{ \bigl( \vert {{a}_{ni}} {{X}_{ni}} \vert >{{b}_{n}} {{t}^{1/q}} \bigr)} \Biggr) \\ \le& P \Biggl(\max_{1\le j\le n} \Biggl\vert \sum _{i=1}^{j}{ \bigl( Y_{ni}'-EY_{ni}' \bigr)} \Biggr\vert >{{b}_{n}} {{t}^{1/q}}-\max _{1\le j\le n} \Biggl\vert \sum_{i=1}^{j}{EY_{ni}'} \Biggr\vert \Biggr) \\ &{}+ \sum_{i=1}^{n}{P \bigl( \vert {{a}_{ni}} {{X}_{ni}} \vert >{{b}_{n}} {{t}^{1/q}} \bigr)}. \end{aligned}$$
(3.15)
First, we shall show that
$$ \max_{t\ge1} \frac {1}{{{b}_{n}}{{t}^{1/q}}}\max _{1\le j\le n} \Biggl\vert \sum_{i=1}^{j}{EY_{ni}'} \Biggr\vert \to0 \quad \text{as }n\to \infty. $$
(3.16)
Similarly to the proofs of (3.7) and (3.8), for \(0<\alpha\le1\), it follows from (3.2) of Lemma 3.3, the Markov inequality, and \(E{{\vert X \vert }^{\alpha}}<\infty\) that
$$\begin{aligned} \max_{t\ge1} \frac {1}{{{b}_{n}}{{t}^{1/q}}}\max _{1\le j\le n} \Biggl\vert \sum_{i=1}^{j}{EY_{ni}'} \Biggr\vert \le& C\max_{t\ge 1} \frac{1}{{{b}_{n}}{{t}^{1/q}}}\sum _{i=1}^{n}{\bigl\vert EY_{ni}' \bigr\vert } \\ \le& C\max_{t\ge1} \frac {1}{{{b}_{n}}{{t}^{1/q}}}\sum _{i=1}^{n}{E\vert {{a}_{ni}} {{X}_{ni}} \vert I \bigl( \vert {{a}_{ni}} {{X}_{ni}} \vert \le{{b}_{n}} {{t}^{1/q}} \bigr)} \\ & {} + C\max_{t\ge1} \sum_{i=1}^{n}{P \bigl( \vert {{a}_{ni}} {{X}_{ni}} \vert >{{b}_{n}} {{t}^{1/q}} \bigr)} \\ \le& C\max_{t\ge1} \frac {1}{{{b}_{n}}{{t}^{1/q}}}\sum _{i=1}^{n}{E\vert {{a}_{ni}}X \vert I \bigl( \vert {{a}_{ni}}X \vert \le{{b}_{n}} {{t}^{1/q}} \bigr)} \\ &{} + C\max_{t\ge1} \sum_{i=1}^{n}{P \bigl( \vert {{a}_{ni}}X \vert >{{b}_{n}} {{t}^{1/q}} \bigr)} \\ \le& C\max_{t\ge1} \frac{1}{b_{n}^{\alpha }{{t}^{\alpha/q}}}\sum _{i=1}^{n}{a_{ni}^{\alpha}E{{ \vert X \vert }^{\alpha}}I \bigl( \vert {{a}_{ni}}X \vert \le {{b}_{n}} {{t}^{1/q}} \bigr)} \\ &{} + C\max_{t\ge1} \frac{1}{b_{n}^{\alpha }{{t}^{\alpha/q}}}\sum _{i=1}^{n}{a_{ni}^{\alpha}E{{ \vert X \vert }^{\alpha}}} \\ \le& C{{ ( \log n )}^{-\alpha/\gamma}}E{{ \vert X \vert }^{\alpha}}\to0 \quad \text{as }n\to\infty. \end{aligned}$$
(3.17)
Noting that \(\vert Z_{ni}' \vert \le\vert {{a}_{ni}}{{X}_{ni}} \vert I ( \vert {{a}_{ni}}{{X}_{ni}} \vert >{{b}_{n}}{{t}^{1/q}} )\), for \(1<\alpha\le2\), it follows from \(E{{X}_{ni}}=0\), (3.3) of Lemma 3.3, and \(E{{\vert X \vert }^{\alpha}}<\infty\) again that
$$\begin{aligned} \max_{t\ge1} \frac {1}{{{b}_{n}}{{t}^{1/q}}}\max _{1\le j\le n} \Biggl\vert \sum_{i=1}^{j}{EY_{ni}'} \Biggr\vert =& \max_{t\ge1} \frac{1}{{{b}_{n}}{{t}^{1/q}}} \max _{1\le j\le n} \Biggl\vert \sum_{i=1}^{j}{EZ_{ni}'} \Biggr\vert \\ \le& C\max_{t\ge1} \frac {1}{{{b}_{n}}{{t}^{1/q}}}\sum _{i=1}^{n}{E\bigl\vert Z_{ni}' \bigr\vert } \\ \le& C\max_{t\ge1} \frac {1}{{{b}_{n}}{{t}^{1/q}}}\sum _{i=1}^{n}{E\vert {{a}_{ni}} {{X}_{ni}} \vert I \bigl( \vert {{a}_{ni}} {{X}_{ni}} \vert >{{b}_{n}} {{t}^{1/q}} \bigr)} \\ \le& C\max_{t\ge1} \frac {1}{{{b}_{n}}{{t}^{1/q}}}\sum _{i=1}^{n}{E\vert {{a}_{ni}}X \vert I \bigl( \vert {{a}_{ni}}X \vert >{{b}_{n}} {{t}^{1/q}} \bigr)} \\ \le& C\max_{t\ge1} \frac{1}{b_{n}^{\alpha }{{t}^{\alpha/q}}}\sum _{i=1}^{n}{a_{ni}^{\alpha}E{{ \vert X \vert }^{\alpha}}I \bigl( \vert {{a}_{ni}}X \vert >{{b}_{n}} {{t}^{1/q}} \bigr)} \\ \le& C{{ ( \log n )}^{-\alpha/\gamma}}E{{ \vert X \vert }^{\alpha}}\to0 \quad \text{as }n\to\infty. \end{aligned}$$
(3.18)
By (3.17) and (3.18) we obtain (3.16). Hence, by (3.15), for all n large enough and all \(t\ge1\), \(P ( E_{n}' )\le P ( \max_{1\le j\le n} \vert \sum_{i=1}^{j}{ ( Y_{ni}'-EY_{ni}' )} \vert >\frac{{{b}_{n}}{{t}^{1/q}}}{2} )+\sum_{i=1}^{n}{P ( \vert {{a}_{ni}}{{X}_{ni}} \vert >{{b}_{n}}{{t}^{1/q}} )}\). Therefore, to prove \({{K}_{2}}<\infty\), it suffices to show that
$$\begin{aligned}& {{K}_{21}}\triangleq\sum_{n=1}^{\infty}{ \frac{1}{n} \int _{1}^{\infty}{P \Biggl(\max_{1\le j\le n} \Biggl\vert \sum_{i=1}^{j}{ \bigl( Y_{ni}'-EY_{ni}' \bigr)} \Biggr\vert >\frac{{{b}_{n}}{{t}^{1/q}}}{2} \Biggr)\,dt}}< \infty, \end{aligned}$$
(3.19)
$$\begin{aligned}& {{K}_{22}}\triangleq\sum_{n=1}^{\infty}{ \frac{1}{n} \int _{1}^{\infty}{\sum_{i=1}^{n}{P \bigl( \vert {{a}_{ni}} {{X}_{ni}} \vert >{{b}_{n}} {{t}^{1/q}} \bigr)}\,dt}}< \infty. \end{aligned}$$
(3.20)
By Lemma 3.1, \(\{ Y_{ni}'-EY_{ni}',i\ge1,n\ge1 \}\) is, for each fixed \(t\ge1\), still an array of rowwise ANA random variables. Hence, it follows from the Markov inequality and Lemma 3.2 that
$$\begin{aligned} {{K}_{21}} \le& C\sum_{n=1}^{\infty}{ \frac{1}{n} \int _{1}^{\infty}{\frac{1}{b_{n}^{2}{{t}^{2/q}}}E \Biggl(\max _{1\le j\le n} {{\Biggl\vert \sum_{i=1}^{j}{ \bigl( Y_{ni}'-EY_{ni}' \bigr)} \Biggr\vert }^{2}} \Biggr)\,dt}} \\ \le& C\sum_{n=1}^{\infty}{\frac{1}{n} \int_{1}^{\infty}{\frac {1}{b_{n}^{2}{{t}^{2/q}}}\sum _{i=1}^{n}{E{{\bigl\vert Y_{ni}'-EY_{ni}' \bigr\vert }^{2}}}\,dt}} \\ \le& C\sum_{n=1}^{\infty}{\frac{1}{n} \int_{1}^{\infty}{\sum_{i=1}^{n}{P \bigl( \vert {{a}_{ni}} {{X}_{ni}} \vert >{{b}_{n}} {{t}^{1/q}} \bigr)}\,dt}} \\ &{}+ C\sum_{n=1}^{\infty}{\frac{1}{nb_{n}^{2}} \int_{1}^{\infty }{\frac{1}{{{t}^{2/q}}}\sum _{i=1}^{n}{E{{\vert {{a}_{ni}} {{X}_{ni}} \vert }^{2}}I \bigl( \vert {{a}_{ni}} {{X}_{ni}} \vert \le{{b}_{n}} {{t}^{1/q}} \bigr)}\,dt}} \\ \le& C\sum_{n=1}^{\infty}{\frac{1}{n} \int_{1}^{\infty}{\sum_{i=1}^{n}{P \bigl( \vert {{a}_{ni}}X \vert >{{b}_{n}} {{t}^{1/q}} \bigr)}\,dt}} \\ &{}+ C\sum_{n=1}^{\infty}{\frac{1}{nb_{n}^{2}} \int_{1}^{\infty }{\frac{1}{{{t}^{2/q}}}\sum _{i=1}^{n}{E{{\vert {{a}_{ni}}X \vert }^{2}}I \bigl( \vert {{a}_{ni}}X \vert \le{{b}_{n}} \bigr)}\,dt}} \\ &{}+ C\sum_{n=1}^{\infty}{\frac{1}{nb_{n}^{2}} \int_{1}^{\infty }{\frac{1}{{{t}^{2/q}}}\sum _{i=1}^{n}{E{{\vert {{a}_{ni}}X \vert }^{2}}I \bigl( {{b}_{n}}< \vert {{a}_{ni}}X \vert \le {{b}_{n}} {{t}^{1/q}} \bigr)}\,dt}}. \end{aligned}$$
(3.21)
The first term on the right-hand side of (3.21) can be bounded in exactly the same way as \({{K}_{22}}\). For \(0< q<\alpha\) and \(\sum_{i=1}^{n}{{{\vert {{a}_{ni}} \vert }^{\alpha}}}=O ( n )\), noting that \( \{ \vert {{a}_{ni}}X \vert >{{b}_{n}}{{t}^{1/q}} \}\subset \{ \vert {{a}_{ni}}X \vert >{{b}_{n}} \} \) for \(t\ge1\) and arguing similarly to the proof of \({{I}_{2}}<\infty\), we obtain that
$$\begin{aligned} K_{22} \le& C\sum_{n=1}^{\infty}{ \frac{1}{n} \int_{1}^{\infty }{\sum_{i=1}^{n}{P \bigl( \vert {{a}_{ni}}X \vert >{{b}_{n}} {{t}^{1/q}} \bigr)}\,dt}} \\ =& C\sum_{n=1}^{\infty}{\frac{1}{n} \int_{1}^{\infty}{\sum_{i=1}^{n}{P \bigl( \vert {{a}_{ni}}X \vert I \bigl( \vert {{a}_{ni}}X \vert >{{b}_{n}} \bigr)>{{b}_{n}} {{t}^{1/q}} \bigr)}\,dt}} \\ \le& C\sum_{n=1}^{\infty}{\frac{1}{n} \int_{0}^{\infty}{\sum_{i=1}^{n}{P \biggl( \frac{{{\vert {{a}_{ni}}X \vert }^{q}}I ( \vert {{a}_{ni}}X \vert >{{b}_{n}} )}{b_{n}^{q}}>t \biggr)}\,dt}} \\ =& C\sum_{n=1}^{\infty}{\frac{1}{n}\sum _{i=1}^{n}{\frac{1}{b_{n}^{q}}E{{\vert {{a}_{ni}}X \vert }^{q}}I \bigl( \vert {{a}_{ni}}X \vert >{{b}_{n}} \bigr)}} \\ \le& C\sum_{n=1}^{\infty}{\frac{1}{nb_{n}^{\alpha}} \sum_{i=1}^{n}{E{{\vert {{a}_{ni}}X \vert }^{\alpha}}I \bigl( \vert {{a}_{ni}}X \vert >{{b}_{n}} \bigr)}} \\ < & \infty\quad (\text{see the proof of } {{I}_{2}}< \infty). \end{aligned}$$
For \(0< q<\alpha\le2\), we have \(2/q>1\), so that \(\int_{1}^{\infty}{{{t}^{-2/q}}\,dt}<\infty\); hence, it follows from Lemma 3.4 (for \(p=2\)) and (2.1) that
$$\begin{aligned} \nabla_{1} \triangleq& \sum_{n=1}^{\infty}{ \frac {1}{nb_{n}^{2}} \int_{1}^{\infty}{\frac{1}{{{t}^{2/q}}}\sum _{i=1}^{n}{E{{\vert {{a}_{ni}}X \vert }^{2}}I \bigl( \vert {{a}_{ni}}X \vert \le{{b}_{n}} \bigr)}\,dt}} \\ \le& C\sum_{n=1}^{\infty}{\frac{1}{n} \frac{1}{b_{n}^{2}}\sum_{i=1}^{n}{E{{\vert {{a}_{ni}}X \vert }^{2}}I \bigl( \vert {{a}_{ni}}X \vert \le{{b}_{n}} \bigr)}} < \infty. \end{aligned}$$
Taking \(t={{x}^{q}}\), by the Markov inequality and (3.2) of Lemma 3.3, it follows that
$$\begin{aligned} \nabla_{2} \triangleq& \sum_{n=1}^{\infty}{ \frac {1}{nb_{n}^{2}} \int_{1}^{\infty}{\frac{1}{{{t}^{2/q}}}\sum _{i=1}^{n}{E{{\vert {{a}_{ni}}X \vert }^{2}}I \bigl( {{b}_{n}}< \vert {{a}_{ni}}X \vert \le{{b}_{n}} {{t}^{1/q}} \bigr)}\,dt}} \\ \le& C\sum_{n=1}^{\infty}{\frac{1}{nb_{n}^{2}} \int _{1}^{\infty}{{{x}^{q-3}}\sum _{i=1}^{n}{E{{\vert {{a}_{ni}}X \vert }^{2}}I \bigl( {{b}_{n}}< \vert {{a}_{ni}}X \vert \le{{b}_{n}}x \bigr)}\, dx}} \\ \le& C\sum_{n=1}^{\infty}{\frac{1}{nb_{n}^{2}} \sum_{m=1}^{\infty}{ \int_{m}^{m+1}{{{x}^{q-3}}\sum _{i=1}^{n}{E{{\vert {{a}_{ni}}X \vert }^{2}}I \bigl( {{b}_{n}}< \vert {{a}_{ni}}X \vert \le{{b}_{n}}x \bigr)}\, dx}}} \\ \le& C\sum_{n=1}^{\infty}{\frac{1}{nb_{n}^{2}} \sum_{m=1}^{\infty}{{{m}^{q-3}}\sum _{i=1}^{n}{E{{\vert {{a}_{ni}}X \vert }^{2}}I \bigl( {{b}_{n}}< \vert {{a}_{ni}}X \vert \le {{b}_{n}} ( m+1 ) \bigr)}}} \\ =& C\sum_{n=1}^{\infty}{\frac{1}{nb_{n}^{2}}\sum _{i=1}^{n}{\sum_{m=1}^{\infty}{ \sum_{s=1}^{m}{{{m}^{q-3}}E{{ \vert {{a}_{ni}}X \vert }^{2}}I \bigl( {{b}_{n}}s< \vert {{a}_{ni}}X \vert \le{{b}_{n}} ( s+1 ) \bigr)}}}} \\ =& C\sum_{n=1}^{\infty}{\frac{1}{nb_{n}^{2}}\sum _{i=1}^{n}{\sum_{s=1}^{\infty}{E{{ \vert {{a}_{ni}}X \vert }^{2}}I \bigl( {{b}_{n}}s< \vert {{a}_{ni}}X \vert \le{{b}_{n}} ( s+1 ) \bigr)\sum_{m=s}^{\infty}{{{m}^{q-3}}}}}} \\ \le& C\sum_{n=1}^{\infty}{\frac{1}{nb_{n}^{2}} \sum_{i=1}^{n}{\sum _{s=1}^{\infty}{E{{ \vert {{a}_{ni}}X \vert }^{2}}I \bigl( {{b}_{n}}s< \vert {{a}_{ni}}X \vert \le{{b}_{n}} ( s+1 ) \bigr){{s}^{q-2}}}}} \\ \le& C\sum_{n=1}^{\infty}{\frac{1}{nb_{n}^{q}} \sum_{i=1}^{n}{E{{\vert {{a}_{ni}}X \vert }^{q}}I \bigl( \vert {{a}_{ni}}X \vert >{{b}_{n}} \bigr)}} \\ \le& C\sum_{n=1}^{\infty}{\frac{1}{nb_{n}^{\alpha}} \sum_{i=1}^{n}{E{{\vert {{a}_{ni}}X \vert }^{\alpha}}I \bigl( \vert {{a}_{ni}}X \vert >{{b}_{n}} \bigr)}} \\ < & \infty \quad (\text{see the proof of } {{I}_{2}}< \infty). \end{aligned}$$
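Two elementary bounds are used in the last steps of this chain: since \(q<2\), \(\sum_{m=s}^{\infty}{{{m}^{q-3}}}\le C{{s}^{q-2}}\) for every \(s\ge1\); and on the event \( \{ {{b}_{n}}s< \vert {{a}_{ni}}X \vert \le{{b}_{n}} ( s+1 ) \} \) with \(s\ge1\) we have \(s\ge \vert {{a}_{ni}}X \vert / ( 2{{b}_{n}} )\), so that
$$ E{{\vert {{a}_{ni}}X \vert }^{2}}I \bigl( {{b}_{n}}s< \vert {{a}_{ni}}X \vert \le{{b}_{n}} ( s+1 ) \bigr){{s}^{q-2}}\le Cb_{n}^{2-q}E{{\vert {{a}_{ni}}X \vert }^{q}}I \bigl( {{b}_{n}}s< \vert {{a}_{ni}}X \vert \le{{b}_{n}} ( s+1 ) \bigr), $$
which, after summing over s, yields the term \(Cb_{n}^{-q}\sum_{i=1}^{n}{E{{\vert {{a}_{ni}}X \vert }^{q}}I ( \vert {{a}_{ni}}X \vert >{{b}_{n}} )}\) appearing above; the final inequality then uses \(q<\alpha\) together with \(\vert {{a}_{ni}}X \vert >{{b}_{n}}\).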
The proof of Theorem 2.2 is completed. □