Skip to content
Snippets Groups Projects
Commit ea7141be authored by Václav Tran's avatar Václav Tran
Browse files

report added

parent 0250f4be
No related branches found
No related tags found
No related merge requests found
Showing
with 573 additions and 0 deletions
image.png 0 → 100644
image.png

147 KiB

File added
File added
File added
report/img/output.png

127 KiB

report/img/stock_market_pred.png

154 KiB

%% LaTeX template class for technical report
%% for subject MVI in Czech Technical University in Prague (CTU)
%%
%% (c) 2008 Vit Zyka
%%
%% History:
%% 0.1 2008-09-21 new for NRP
%% 0.2 2008-11-12 href for email, unicode in hyperref; tolerance; microtype; cmap
%% \texorpdfstring
%% 0.3 2018-02-12 update for purposes of MI-MVI
\typeout{Class mvi-report v0.3, 2018-02-12, Martin Slapak, Vit Zyka}
%% Options...
\newif\ifCzech
\DeclareOption{czech}{\Czechtrue}
\ProcessOptions
\LoadClass[a4paper,normalheadings]{scrartcl}
%% Dimensions...
\setlength{\columnsep}{10mm}
\renewcommand{\topfraction}{0.9}
\renewcommand{\bottomfraction}{0.9}
\renewcommand{\dbltopfraction}{0.9}
\renewcommand{\textfraction}{0}
\setcounter{topnumber}{4}
\setcounter{bottomnumber}{4}
\setcounter{totalnumber}{4}
\setcounter{dbltopnumber}{4}
\tolerance=300
%% Packages...
%% NOTE: inside a class file, packages must be loaded with \RequirePackage,
%% not the document-level \usepackage (see clsguide); fixed below.
\RequirePackage{ifpdf}
\ifCzech
\RequirePackage[czech]{babel}
\fi
\RequirePackage{lmodern}
\RequirePackage{cmap}
\RequirePackage[T1]{fontenc}
%\RequirePackage{ae,aecompl}
\RequirePackage[left=15mm,right=15mm,top=18mm,bottom=20mm,footskip=10mm]{geometry}
\ifpdf
\RequirePackage[pdftex]{graphicx}
\RequirePackage[pdftex]{color}
\else
\RequirePackage{graphicx}
\RequirePackage{color}
\fi
\RequirePackage[pdfpagemode=UseNone,pdfborder=0 0 0,unicode]{hyperref}
\RequirePackage{mflogo}
\RequirePackage{natbib}
\RequirePackage{microtype}
%% Title...
\newtoks\MVIemail \def\email#1{\MVIemail={#1}}
\newtoks\MVIaffiliation \def\affiliation#1{\MVIaffiliation={#1}}
\def\affiliationFont{\normalfont\fontsize{12pt}{14pt}\selectfont}
\AtBeginDocument{%
\expandafter\author\expandafter{%
\@author\\[4pt]\affiliationFont
\def\\{\crcr\affiliationFont}\the\MVIaffiliation\\
\href{mailto:\the\MVIemail}{\affiliationFont\the\MVIemail}}%
\expandafter\date\expandafter{\expandafter\affiliationFont\@date}%
}
\let\maketitleOrig=\maketitle
\def\maketitle{\@ifnextchar[{\domaketitle}{\domaketitle[]}}
\def\domaketitle[#1]{%
\twocolumn[\vskip-2.0cm\maketitleOrig\begin{quotation}#1\end{quotation}\vskip-1.0cm]%
}
%\bigskip
% pokus o usetreni mista v \paragraph
\let\paragraphOrig=\paragraph
\renewcommand\paragraph{
\@startsection
{paragraph} % the name
{4} % the level
{\z@} % the indent
{1.5ex \@plus1ex \@minus.2ex} % the before skip
{-1em} % the after skip
{\normalfont\normalsize\bfseries} % the style
}
\renewcommand{\baselinestretch}{1}
\setlength{\parskip}{-0.3ex}
%% Bibliography...
% citace dle abecedy
\bibliographystyle{plain}
% citace dle poradi, ale nevejde se to
%\bibliographystyle{is-unsrt}
% citace dle poradi, ale vejde se to ;-)
%\bibliographystyle{unsrt}
%% Misc...
\def\CONTEXT{\texorpdfstring{Con\kern-.1em\TeX{}t}{ConTeXt}}
\def\MetaPost{\texorpdfstring{\MP}{MetaPost}}
\endinput
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%\
% pokud nechcem aby nam to zmensovalo pismenka v nazvech titulu,
% tak to obklopit chlupatejma zavorkama
@misc{ zizka,
title = {Some interesting source},
author = { Jan {\v Z}i{\v z}ka},
year = {1415},
howpublished = {online},
note = {[cit.~2018--10--11] \url{http://zizka.trocnov/husiti/conspiration.pdf} }
}
@misc{feng2024rnnsneeded,
title={Were RNNs All We Needed?},
author={Leo Feng and Frederick Tung and Mohamed Osama Ahmed and Yoshua Bengio and Hossein Hajimirsadeghi},
year={2024},
eprint={2410.01201},
archivePrefix={arXiv},
primaryClass={cs.LG},
url={https://arxiv.org/abs/2410.01201},
}
@misc{chung2014empiricalevaluationgatedrecurrent,
title={Empirical Evaluation of Gated Recurrent Neural Networks on Sequence Modeling},
author={Junyoung Chung and Caglar Gulcehre and KyungHyun Cho and Yoshua Bengio},
year={2014},
eprint={1412.3555},
archivePrefix={arXiv},
primaryClass={cs.NE},
url={https://arxiv.org/abs/1412.3555},
}
@article{10.1162/neco.1997.9.8.1735,
author = {Hochreiter, Sepp and Schmidhuber, J\"{u}rgen},
title = {Long Short-Term Memory},
year = {1997},
issue_date = {November 15, 1997},
publisher = {MIT Press},
address = {Cambridge, MA, USA},
volume = {9},
number = {8},
issn = {0899-7667},
url = {https://doi.org/10.1162/neco.1997.9.8.1735},
doi = {10.1162/neco.1997.9.8.1735},
abstract = {Learning to store information over extended time intervals by recurrent backpropagation takes a very long time, mostly because of insufficient, decaying error backflow. We briefly review Hochreiter's (1991) analysis of this problem, then address it by introducing a novel, efficient, gradient based method called long short-term memory (LSTM). Truncating the gradient where this does not do harm, LSTM can learn to bridge minimal time lags in excess of 1000 discrete-time steps by enforcing constant error flow through constant error carousels within special units. Multiplicative gate units learn to open and close access to the constant error flow. LSTM is local in space and time; its computational complexity per time step and weight is O. 1. Our experiments with artificial data involve local, distributed, real-valued, and noisy pattern representations. In comparisons with real-time recurrent learning, back propagation through time, recurrent cascade correlation, Elman nets, and neural sequence chunking, LSTM leads to many more successful runs, and learns much faster. LSTM also solves complex, artificial long-time-lag tasks that have never been solved by previous recurrent network algorithms.},
journal = {Neural Comput.},
month = nov,
pages = {1735--1780},
numpages = {46}
}
@misc{oreshkin2020nbeatsneuralbasisexpansion,
title={N-BEATS: Neural basis expansion analysis for interpretable time series forecasting},
author={Boris N. Oreshkin and Dmitri Carpov and Nicolas Chapados and Yoshua Bengio},
year={2020},
eprint={1905.10437},
archivePrefix={arXiv},
primaryClass={cs.LG},
url={https://arxiv.org/abs/1905.10437},
}
@misc{vaswani2023attentionneed,
title={Attention Is All You Need},
author={Ashish Vaswani and Noam Shazeer and Niki Parmar and Jakob Uszkoreit and Llion Jones and Aidan N. Gomez and Lukasz Kaiser and Illia Polosukhin},
year={2023},
eprint={1706.03762},
archivePrefix={arXiv},
primaryClass={cs.CL},
url={https://arxiv.org/abs/1706.03762},
}
@misc{yiasemis2023rnnfromscratch,
author = {Yiasemis, George},
title = {Recurrent Neural Networks from Scratch using PyTorch},
howpublished = {\url{https://github.com/georgeyiasemis/Recurrent-Neural-Networks-from-scratch-using-PyTorch/tree/main}},
year = {2023}
}
File added
%%
%% Created in 2018 by Martin Slapak
%%
%% Based on file for NRP report LaTeX class by Vit Zyka (2008)
%%
%% Compilation:
%% >pdflatex report
%% >bibtex report
%% >pdflatex report
%% >pdflatex report
\documentclass[english]{mvi-report}
\usepackage[utf8]{inputenc}
\title{A first exploration of the new minGRU models for time series analysis}
\author{Václav Tran}
\affiliation{ČVUT - FIT}
\email{tranvacl@fit.cvut.cz}
% \file{name}: typeset a file name in monospace.
% \newcommand* errors on accidental redefinition (unlike \def);
% \texttt replaces the obsolete plain-TeX {\tt ...} form.
\newcommand*{\file}[1]{\texttt{#1}}
\begin{document}
\maketitle
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\section{Introduction}
This work focuses on three time-series related tasks. The first task involves parameter inference, where the goal is to determine key characteristics of a sinusoidal wave, such as amplitude and frequency, directly from a set of observed data points. The second task is time-series forecasting, where the objective is to predict the future evolution of a sinusoidal wave based on historical data. The third task extends the application of time-series forecasting to stock market data.
To address these tasks, we evaluated the performance of the minGRU architecture \cite{feng2024rnnsneeded}, a simplification of the GRU architecture \cite{chung2014empiricalevaluationgatedrecurrent}. We compare minGRU with other established architectures, assessing their strengths and limitations across these tasks. Additionally, in the second task, training time versus dataset size was evaluated for minGRU, GRU, and LSTM \cite{10.1162/neco.1997.9.8.1735} to compare their computational efficiency.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\section{Input Data}
The datasets used in this exploration are tailored to the specific requirements of each task. Each dataset was divided into an 80/20 split for training and testing across all tasks.
\subsection{Parameter Inference}
For the parameter inference task, a custom synthetic dataset was generated, where each sample consists of 100 data points representing a sinusoidal wave with varying amplitude and frequency.
\subsection{Time-Series Forecasting (Sinusoidal Waves)}
For the time-series forecasting of sinusoidal waves, another custom synthetic dataset was generated. Each sample in this dataset represents a sinusoidal wave, consisting of 100 data points as input and one additional target data point for prediction.
\subsection{Stock Market Forecasting}
For the stock market forecasting task, real-world stock data for IBM was sourced using the \texttt{yfinance} library. The dataset spans from \texttt{2014-01-01} to \texttt{2024-04-01} and includes daily stock prices. Preprocessing focused on the closing prices.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\section{Methods}
The methods employed in this study are tailored to the requirements of each task. Different evaluation metrics and model comparisons are used to ensure a quality assessment. All computations were performed on an NVIDIA A100 GPU (20GB MIG instance) using MetaCentrum's GPU clusters.
\subsection{Parameter Inference}
For the parameter inference task, the models are evaluated using the Mean Absolute Error (MAE). The performance of the following models is compared:
\begin{itemize}
\item GRU
\item LSTM
\item Transformer-encoder \cite{vaswani2023attentionneed}
\item minGRU
\end{itemize}
The GRU, LSTM, and Transformer-encoder models were implemented using the PyTorch \texttt{nn} library, while the minGRU architecture was implemented from scratch based on its original paper.
\subsection{Time-Series Forecasting (Sinusoidal Waves)}
For time-series forecasting of sinusoidal waves, models are evaluated using the normalized Root Mean Squared Error (nRMSE). The comparison includes the following models:
\begin{itemize}
\item minGRU
\item Transformer encoder-decoder
\item N-BEATS \cite{oreshkin2020nbeatsneuralbasisexpansion}
\item Exponential Smoothing
\end{itemize}
All models except minGRU are implemented and evaluated using the DARTS time-series forecasting library. This task focuses on comparing the ability of the models to predict future values of sinusoidal waves based on historical data. For minGRU, autoregressive prediction was used, where the model used its previous predictions as inputs to forecast future values. During training, a sliding window approach was employed, where the model used the previous 100 data points to predict the next data point.
During evaluation of training time versus dataset size in time-series forecasting of sinusoidal waves, a different implementation of LSTM and GRU was used to account for the high optimization of PyTorch's default implementation \cite{yiasemis2023rnnfromscratch}.
\subsection{Stock Market Forecasting}
For forecasting stock market data, the models are also evaluated using nRMSE. The following models are compared:
\begin{itemize}
\item minGRU
\item Transformer encoder-decoder
\item N-BEATS
\item Exponential Smoothing
\item Prophet
\end{itemize}
As in Task 2, the DARTS library is used for implementing and evaluating most models, with Prophet included as an additional comparison. Once again, for minGRU, autoregressive prediction was used. During training, a sliding window approach was employed, where the model used the previous 60 time steps to predict the subsequent 60 time steps.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% --- VYSLEDKY
\section{Results}
\subsection{Parameter Inference}
For the parameter inference of sinusoidal waves, models were evaluated using the MAE for amplitude and frequency estimation, along with percentage errors relative to the true values. The results calculated on 10000 samples are presented below. The percentage error indicates how large the error is compared to the actual value.
\begin{table}[h]
\centering
\scriptsize
\caption{Performance of models on amplitude estimation.}
\label{tab:amplitude_results}
\begin{tabular}{lcc}
\hline
Model & MAE (units) & Percentage (\%) \\ \hline
MinGRU & 0.13 & 1.32 \\
GRU & \textbf{0.12} & \textbf{1.17} \\
LSTM & 0.15 & 1.51 \\
Transformer Encoder & 0.22 & 2.24 \\ \hline
\end{tabular}
\end{table}
\begin{table}[h]
\centering
\scriptsize
\caption{Performance of models on frequency estimation.}
\label{tab:frequency_results}
\begin{tabular}{lcc}
\hline
Model & MAE (units) & Percentage (\%) \\ \hline
MinGRU & 0.15 & 1.46 \\
GRU & \textbf{0.05} & \textbf{0.48} \\
LSTM & 0.12 & 1.20 \\
Transformer Encoder & 0.21 & 2.09 \\ \hline
\end{tabular}
\end{table}
The GRU model achieved the lowest error rates for both amplitude and frequency estimation. MinGRU and LSTM show comparable performance, while the Transformer had the highest errors in both categories.
\subsection{Time-Series Forecasting (Sinusoidal Waves)}
The dataset used for this task consisted of 5000 samples, where each sample had 100 data points plus one target point. The maximum amplitude and frequency were both set to 10. The models were trained on 4000 samples for 100 epochs and their performance was evaluated on the test set, consisting of the remaining 1000 samples. The averaged results over the 1000 samples are presented in Table~\ref{tab:sinusoidal_forecasting}.
\begin{table}[h]
\centering
\caption{Performance of models on sinusoidal wave forecasting (nRMSE).}
\label{tab:sinusoidal_forecasting}
\begin{tabular}{lc}
\hline
Model & Average nRMSE \\ \hline
minGRU & \textbf{1.2603} \\
N-BEATS & 1.7368 \\
Transformer & 1.9388 \\
Exponential Smoothing & 17.0372 \\ \hline
\end{tabular}
\end{table}
\subsubsection{Training Time and Dataset Size}
The evaluation of time-series forecasting models for sinusoidal waves includes a comparison of training times across different dataset sizes.
% [h] alone is converted to [ht] with a warning and often strands the float;
% [htbp] gives LaTeX sensible fallback placements.
\begin{figure}[htbp]
\centering
\includegraphics[width=0.8\linewidth]{img/output.png}
\caption{Training time vs. dataset size for minGRU, GRU, and LSTM.}
\label{fig:training_time_comparison}
\end{figure}
As shown in Figure~\ref{fig:training_time_comparison}, the minGRU consistently demonstrates faster training times compared to the LSTM and GRU, particularly as the dataset size increases. This efficiency makes minGRU a suitable choice for large-scale time-series datasets.
\subsection{Stock Market Forecasting}
For stock market data forecasting, the models were evaluated using nRMSE. The results on the test set for IBM stock data are shown in Figure~\ref{fig:stock_market_pred}.
% In two-column mode, figure* ignores the [h] specifier entirely
% (full-width floats can only go at the top of a page or on a float
% page), so [h] here silently behaves like no valid option; use [t].
\begin{figure*}[t]
\centering
\includegraphics[width=\textwidth]{img/stock_market_pred.png}
\caption{Stock market prediction visualizations.}
\label{fig:stock_market_pred}
\end{figure*}
\subsection{Discussion}
The results demonstrate that the minGRU architecture effectively balances forecasting accuracy and computational efficiency, outperforming or matching traditional models such as GRU and LSTM in sinusoidal forecasting tasks while offering faster training times.
However, GRU surprisingly outperforms minGRU by a somewhat large margin in estimating the frequency of the sinusoidal wave. This discrepancy may be attributed to GRU's more complex gating mechanisms, which could be required for more accurate frequency estimation in sinusoidal data.
In the stock market forecasting task, minGRU achieved competitive performance compared to established models, although specialized models like Prophet still exhibited superior accuracy.
Overall, these findings suggest that minGRU is a viable and efficient alternative for time-series analysis, particularly in scenarios where computational resources and training speed are critical.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% --- ZAVER
\section{Conclusion}
The minGRU architecture offers a significantly more efficient and scalable alternative to traditional GRU models, demonstrating competitive performance in time-series forecasting tasks.
This analysis supports the adoption of minGRU in various time-series forecasting scenarios, where it consistently matches or outperforms other architectures, such as Transformer and N-BEATS, especially in environments with limited computing resources.
Further analysis could explore the application of minGRU in diverse domains such as weather prediction and anomaly detection, as well as evaluate its scalability and performance in natural language processing tasks to determine whether it can compete effectively with Transformer-based architectures.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% --- Bibliography
%\bibliographystyle{plain-cz-online}
\bibliography{reference}
\end{document}
File added
File added
%% LaTeX template class for technical report
%% for subject MVI in Czech Technical University in Prague (CTU)
%%
%% (c) 2008 Vit Zyka
%%
%% History:
%% 0.1 2008-09-21 new for NRP
%% 0.2 2008-11-12 href for email, unicode in hyperref; tolerance; microtype; cmap
%% \texorpdfstring
%% 0.3 2018-02-12 update for purposes of MI-MVI
\typeout{Class mvi-report v0.3, 2018-02-12, Martin Slapak, Vit Zyka}
%% Options...
\newif\ifCzech
\DeclareOption{czech}{\Czechtrue}
\ProcessOptions
\LoadClass[a4paper,normalheadings]{scrartcl}
%% Dimensions...
\setlength{\columnsep}{10mm}
\renewcommand{\topfraction}{0.9}
\renewcommand{\bottomfraction}{0.9}
\renewcommand{\dbltopfraction}{0.9}
\renewcommand{\textfraction}{0}
\setcounter{topnumber}{4}
\setcounter{bottomnumber}{4}
\setcounter{totalnumber}{4}
\setcounter{dbltopnumber}{4}
\tolerance=300
%% Packages...
%% NOTE: inside a class file, packages must be loaded with \RequirePackage,
%% not the document-level \usepackage (see clsguide); fixed below.
\RequirePackage{ifpdf}
\ifCzech
\RequirePackage[czech]{babel}
\fi
\RequirePackage{lmodern}
\RequirePackage{cmap}
\RequirePackage[T1]{fontenc}
%\RequirePackage{ae,aecompl}
\RequirePackage[left=15mm,right=15mm,top=18mm,bottom=20mm,footskip=10mm]{geometry}
\ifpdf
\RequirePackage[pdftex]{graphicx}
\RequirePackage[pdftex]{color}
\else
\RequirePackage{graphicx}
\RequirePackage{color}
\fi
\RequirePackage[pdfpagemode=UseNone,pdfborder=0 0 0,unicode]{hyperref}
\RequirePackage{mflogo}
\RequirePackage{natbib}
\RequirePackage{microtype}
%% Title...
\newtoks\MVIemail \def\email#1{\MVIemail={#1}}
\newtoks\MVIaffiliation \def\affiliation#1{\MVIaffiliation={#1}}
\def\affiliationFont{\normalfont\fontsize{12pt}{14pt}\selectfont}
\AtBeginDocument{%
\expandafter\author\expandafter{%
\@author\\[4pt]\affiliationFont
\def\\{\crcr\affiliationFont}\the\MVIaffiliation\\
\href{mailto:\the\MVIemail}{\affiliationFont\the\MVIemail}}%
\expandafter\date\expandafter{\expandafter\affiliationFont\@date}%
}
\let\maketitleOrig=\maketitle
\def\maketitle{\@ifnextchar[{\domaketitle}{\domaketitle[]}}
\def\domaketitle[#1]{%
\twocolumn[\vskip-2.0cm\maketitleOrig\begin{quotation}#1\end{quotation}\vskip-1.0cm]%
}
%\bigskip
% pokus o usetreni mista v \paragraph
\let\paragraphOrig=\paragraph
\renewcommand\paragraph{
\@startsection
{paragraph} % the name
{4} % the level
{\z@} % the indent
{1.5ex \@plus1ex \@minus.2ex} % the before skip
{-1em} % the after skip
{\normalfont\normalsize\bfseries} % the style
}
\renewcommand{\baselinestretch}{1}
\setlength{\parskip}{-0.3ex}
%% Bibliography...
% citace dle abecedy
\bibliographystyle{plain}
% citace dle poradi, ale nevejde se to
%\bibliographystyle{is-unsrt}
% citace dle poradi, ale vejde se to ;-)
%\bibliographystyle{unsrt}
%% Misc...
\def\CONTEXT{\texorpdfstring{Con\kern-.1em\TeX{}t}{ConTeXt}}
\def\MetaPost{\texorpdfstring{\MP}{MetaPost}}
\endinput
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%\
% pokud nechcem aby nam to zmensovalo pismenka v nazvech titulu,
% tak to obklopit chlupatejma zavorkama
@misc{ zizka,
title = {Some interesting source},
author = { Jan {\v Z}i{\v z}ka},
year = {1415},
howpublished = {online},
note = {[cit.~2018--10--11] \url{http://zizka.trocnov/husiti/conspiration.pdf} }
}
File added
%%
%% Created in 2018 by Martin Slapak
%%
%% Based on file for NRP report LaTeX class by Vit Zyka (2008)
%%
%% Compilation:
%% >pdflatex report
%% >bibtex report
%% >pdflatex report
%% >pdflatex report
\documentclass[czech]{mvi-report}
\usepackage[utf8]{inputenc}
\title{Výstižný název vaší semestrální práce}
\author{Jméno Příjmení}
\affiliation{ČVUT - FIT}
\email{username@fit.cvut.cz}
% \file{name}: typeset a file name in monospace.
% \newcommand* errors on accidental redefinition (unlike \def);
% \texttt replaces the obsolete plain-TeX {\tt ...} form.
\newcommand*{\file}[1]{\texttt{#1}}
\begin{document}
\maketitle
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\section{Úvod}
Definice problému/úkolu, který práce řeší\ldots
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\section{Vstupní data}
Původ, proces získání, předzpracování, \ldots
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\section{Metody}
Použité metody, jejich přizpůsobení, aplikace\ldots
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% --- VYSLEDKY
\section{Výsledky}
Jakých výsledků bylo dosaženo, co na ně mělo vliv. Srovnání s očekáváním, \emph{diskuze nad výsledky} -- zvláště důležitá v případě, že něco vyšlo \emph{divně}.
\begin{figure}[h]
\centering\leavevmode
\includegraphics[width=.45\linewidth]{img/fit-logo-cz.pdf}\vskip-0.5cm
\caption{Vliv parametru \emph{Y}}
\label{fig:par-y}
\end{figure}
\begin{figure}[h]
\centering\leavevmode
\includegraphics[width=.45\linewidth]{img/fit-logo-en.pdf}\vskip-0.5cm
\caption{Vliv parametru \emph{X}}
\label{fig:par-x}
\end{figure}
Jak je z ilustrací \ref{fig:par-y} a \ref{fig:par-x} patrné, není to totéž, protože\ldots
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% --- ZAVER
\section{Závěr}
K čemu to bylo/je dobré, jak to půjde využít dále, co by šlo ještě vylepšit\ldots
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% --- Bibliography
\nocite{zizka}
%\bibliographystyle{plain-cz-online}
\bibliography{reference}
\end{document}
stock_market_darts.png

133 KiB

File added
stock_market_mingru.png

72 KiB

stock_market_pred.png

154 KiB

0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment