Spaces:
Runtime error
Runtime error
pkiage
commited on
Commit
•
3af04c7
0
Parent(s):
initial commit
Browse files- .gitignore +90 -0
- .slugignore +98 -0
- LICENSE +10 -0
- Makefile +144 -0
- Procfile +1 -0
- README.md +77 -0
- docs/Makefile +153 -0
- docs/commands.rst +10 -0
- docs/conf.py +244 -0
- docs/getting-started.rst +6 -0
- docs/index.rst +24 -0
- docs/make.bat +190 -0
- models/.gitkeep +0 -0
- notebooks/.gitkeep +0 -0
- references/.gitkeep +0 -0
- references/References.md +32 -0
- reports/.gitkeep +0 -0
- reports/figures/.gitkeep +0 -0
- requirements.txt +11 -0
- setup.py +10 -0
- setup.sh +13 -0
- src/__init__.py +0 -0
- src/app.py +125 -0
- src/app_utils.py +17 -0
- src/data/.gitkeep +0 -0
- src/data/__init__.py +0 -0
- src/data/utils.py +24 -0
- src/features/utils.py +117 -0
- src/visualization/.gitkeep +0 -0
- src/visualization/__init__.py +0 -0
- src/visualization/visualize.py +192 -0
- test_environment.py +25 -0
- tox.ini +3 -0
.gitignore
ADDED
@@ -0,0 +1,90 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
venv
|
2 |
+
# Byte-compiled / optimized / DLL files
|
3 |
+
__pycache__/
|
4 |
+
*.py[cod]
|
5 |
+
|
6 |
+
# C extensions
|
7 |
+
*.so
|
8 |
+
|
9 |
+
# Distribution / packaging
|
10 |
+
.Python
|
11 |
+
env/
|
12 |
+
build/
|
13 |
+
develop-eggs/
|
14 |
+
dist/
|
15 |
+
downloads/
|
16 |
+
eggs/
|
17 |
+
.eggs/
|
18 |
+
lib/
|
19 |
+
lib64/
|
20 |
+
parts/
|
21 |
+
sdist/
|
22 |
+
var/
|
23 |
+
*.egg-info/
|
24 |
+
.installed.cfg
|
25 |
+
*.egg
|
26 |
+
|
27 |
+
# PyInstaller
|
28 |
+
# Usually these files are written by a python script from a template
|
29 |
+
# before PyInstaller builds the exe, so as to inject date/other infos into it.
|
30 |
+
*.manifest
|
31 |
+
*.spec
|
32 |
+
|
33 |
+
# Installer logs
|
34 |
+
pip-log.txt
|
35 |
+
pip-delete-this-directory.txt
|
36 |
+
|
37 |
+
# Unit test / coverage reports
|
38 |
+
htmlcov/
|
39 |
+
.tox/
|
40 |
+
.coverage
|
41 |
+
.coverage.*
|
42 |
+
.cache
|
43 |
+
nosetests.xml
|
44 |
+
coverage.xml
|
45 |
+
*.cover
|
46 |
+
|
47 |
+
# Translations
|
48 |
+
*.mo
|
49 |
+
*.pot
|
50 |
+
|
51 |
+
# Django stuff:
|
52 |
+
*.log
|
53 |
+
|
54 |
+
# Sphinx documentation
|
55 |
+
docs/_build/
|
56 |
+
|
57 |
+
# PyBuilder
|
58 |
+
target/
|
59 |
+
|
60 |
+
# DotEnv configuration
|
61 |
+
.env
|
62 |
+
|
63 |
+
# Database
|
64 |
+
*.db
|
65 |
+
*.rdb
|
66 |
+
|
67 |
+
# Pycharm
|
68 |
+
.idea
|
69 |
+
|
70 |
+
# VS Code
|
71 |
+
.vscode/
|
72 |
+
|
73 |
+
# Spyder
|
74 |
+
.spyproject/
|
75 |
+
|
76 |
+
# Jupyter NB Checkpoints
|
77 |
+
.ipynb_checkpoints/
|
78 |
+
|
79 |
+
# exclude data from source control by default
|
80 |
+
/data/
|
81 |
+
|
82 |
+
# Mac OS-specific storage files
|
83 |
+
.DS_Store
|
84 |
+
|
85 |
+
# vim
|
86 |
+
*.swp
|
87 |
+
*.swo
|
88 |
+
|
89 |
+
# Mypy cache
|
90 |
+
.mypy_cache/
|
.slugignore
ADDED
@@ -0,0 +1,98 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
venv
|
2 |
+
|
3 |
+
docs
|
4 |
+
models
|
5 |
+
notebooks
|
6 |
+
reports
|
7 |
+
README
|
8 |
+
Makefile
|
9 |
+
test_environment
|
10 |
+
venv
|
11 |
+
|
12 |
+
# Byte-compiled / optimized / DLL files
|
13 |
+
__pycache__/
|
14 |
+
*.py[cod]
|
15 |
+
|
16 |
+
# C extensions
|
17 |
+
*.so
|
18 |
+
|
19 |
+
# Distribution / packaging
|
20 |
+
.Python
|
21 |
+
env/
|
22 |
+
build/
|
23 |
+
develop-eggs/
|
24 |
+
dist/
|
25 |
+
downloads/
|
26 |
+
eggs/
|
27 |
+
.eggs/
|
28 |
+
lib/
|
29 |
+
lib64/
|
30 |
+
parts/
|
31 |
+
sdist/
|
32 |
+
var/
|
33 |
+
*.egg-info/
|
34 |
+
.installed.cfg
|
35 |
+
*.egg
|
36 |
+
|
37 |
+
# PyInstaller
|
38 |
+
# Usually these files are written by a python script from a template
|
39 |
+
# before PyInstaller builds the exe, so as to inject date/other infos into it.
|
40 |
+
*.manifest
|
41 |
+
*.spec
|
42 |
+
|
43 |
+
# Installer logs
|
44 |
+
pip-log.txt
|
45 |
+
pip-delete-this-directory.txt
|
46 |
+
|
47 |
+
# Unit test / coverage reports
|
48 |
+
htmlcov/
|
49 |
+
.tox/
|
50 |
+
.coverage
|
51 |
+
.coverage.*
|
52 |
+
.cache
|
53 |
+
nosetests.xml
|
54 |
+
coverage.xml
|
55 |
+
*.cover
|
56 |
+
|
57 |
+
# Translations
|
58 |
+
*.mo
|
59 |
+
*.pot
|
60 |
+
|
61 |
+
# Django stuff:
|
62 |
+
*.log
|
63 |
+
|
64 |
+
# Sphinx documentation
|
65 |
+
docs/_build/
|
66 |
+
|
67 |
+
# PyBuilder
|
68 |
+
target/
|
69 |
+
|
70 |
+
# DotEnv configuration
|
71 |
+
.env
|
72 |
+
|
73 |
+
# Database
|
74 |
+
*.db
|
75 |
+
*.rdb
|
76 |
+
|
77 |
+
# Pycharm
|
78 |
+
.idea
|
79 |
+
|
80 |
+
# VS Code
|
81 |
+
.vscode/
|
82 |
+
|
83 |
+
# Spyder
|
84 |
+
.spyproject/
|
85 |
+
|
86 |
+
# Jupyter NB Checkpoints
|
87 |
+
.ipynb_checkpoints/
|
88 |
+
|
89 |
+
|
90 |
+
# Mac OS-specific storage files
|
91 |
+
.DS_Store
|
92 |
+
|
93 |
+
# vim
|
94 |
+
*.swp
|
95 |
+
*.swo
|
96 |
+
|
97 |
+
# Mypy cache
|
98 |
+
.mypy_cache/
|
LICENSE
ADDED
@@ -0,0 +1,10 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
|
2 |
+
The MIT License (MIT)
|
3 |
+
Copyright (c) 2022, Author
|
4 |
+
|
5 |
+
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
|
6 |
+
|
7 |
+
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
|
8 |
+
|
9 |
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
10 |
+
|
Makefile
ADDED
@@ -0,0 +1,144 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
.PHONY: clean data lint requirements sync_data_to_s3 sync_data_from_s3
|
2 |
+
|
3 |
+
#################################################################################
|
4 |
+
# GLOBALS #
|
5 |
+
#################################################################################
|
6 |
+
|
7 |
+
PROJECT_DIR := $(shell dirname $(realpath $(lastword $(MAKEFILE_LIST))))
|
8 |
+
BUCKET = [OPTIONAL] your-bucket-for-syncing-data (do not include 's3://')
|
9 |
+
PROFILE = default
|
10 |
+
PROJECT_NAME = tool-time-series-autocorrelation-demo
|
11 |
+
PYTHON_INTERPRETER = python3
|
12 |
+
|
13 |
+
ifeq (,$(shell which conda))
|
14 |
+
HAS_CONDA=False
|
15 |
+
else
|
16 |
+
HAS_CONDA=True
|
17 |
+
endif
|
18 |
+
|
19 |
+
#################################################################################
|
20 |
+
# COMMANDS #
|
21 |
+
#################################################################################
|
22 |
+
|
23 |
+
## Install Python Dependencies
|
24 |
+
requirements: test_environment
|
25 |
+
$(PYTHON_INTERPRETER) -m pip install -U pip setuptools wheel
|
26 |
+
$(PYTHON_INTERPRETER) -m pip install -r requirements.txt
|
27 |
+
|
28 |
+
## Make Dataset
|
29 |
+
data: requirements
|
30 |
+
$(PYTHON_INTERPRETER) src/data/make_dataset.py data/raw data/processed
|
31 |
+
|
32 |
+
## Delete all compiled Python files
|
33 |
+
clean:
|
34 |
+
find . -type f -name "*.py[co]" -delete
|
35 |
+
find . -type d -name "__pycache__" -delete
|
36 |
+
|
37 |
+
## Lint using flake8
|
38 |
+
lint:
|
39 |
+
flake8 src
|
40 |
+
|
41 |
+
## Upload Data to S3
|
42 |
+
sync_data_to_s3:
|
43 |
+
ifeq (default,$(PROFILE))
|
44 |
+
aws s3 sync data/ s3://$(BUCKET)/data/
|
45 |
+
else
|
46 |
+
aws s3 sync data/ s3://$(BUCKET)/data/ --profile $(PROFILE)
|
47 |
+
endif
|
48 |
+
|
49 |
+
## Download Data from S3
|
50 |
+
sync_data_from_s3:
|
51 |
+
ifeq (default,$(PROFILE))
|
52 |
+
aws s3 sync s3://$(BUCKET)/data/ data/
|
53 |
+
else
|
54 |
+
aws s3 sync s3://$(BUCKET)/data/ data/ --profile $(PROFILE)
|
55 |
+
endif
|
56 |
+
|
57 |
+
## Set up python interpreter environment
|
58 |
+
create_environment:
|
59 |
+
ifeq (True,$(HAS_CONDA))
|
60 |
+
@echo ">>> Detected conda, creating conda environment."
|
61 |
+
ifeq (3,$(findstring 3,$(PYTHON_INTERPRETER)))
|
62 |
+
conda create --name $(PROJECT_NAME) python=3
|
63 |
+
else
|
64 |
+
conda create --name $(PROJECT_NAME) python=2.7
|
65 |
+
endif
|
66 |
+
@echo ">>> New conda env created. Activate with:\nsource activate $(PROJECT_NAME)"
|
67 |
+
else
|
68 |
+
$(PYTHON_INTERPRETER) -m pip install -q virtualenv virtualenvwrapper
|
69 |
+
@echo ">>> Installing virtualenvwrapper if not already installed.\nMake sure the following lines are in shell startup file\n\
|
70 |
+
export WORKON_HOME=$$HOME/.virtualenvs\nexport PROJECT_HOME=$$HOME/Devel\nsource /usr/local/bin/virtualenvwrapper.sh\n"
|
71 |
+
@bash -c "source `which virtualenvwrapper.sh`;mkvirtualenv $(PROJECT_NAME) --python=$(PYTHON_INTERPRETER)"
|
72 |
+
@echo ">>> New virtualenv created. Activate with:\nworkon $(PROJECT_NAME)"
|
73 |
+
endif
|
74 |
+
|
75 |
+
## Test python environment is setup correctly
|
76 |
+
test_environment:
|
77 |
+
$(PYTHON_INTERPRETER) test_environment.py
|
78 |
+
|
79 |
+
#################################################################################
|
80 |
+
# PROJECT RULES #
|
81 |
+
#################################################################################
|
82 |
+
|
83 |
+
|
84 |
+
|
85 |
+
#################################################################################
|
86 |
+
# Self Documenting Commands #
|
87 |
+
#################################################################################
|
88 |
+
|
89 |
+
.DEFAULT_GOAL := help
|
90 |
+
|
91 |
+
# Inspired by <http://marmelab.com/blog/2016/02/29/auto-documented-makefile.html>
|
92 |
+
# sed script explained:
|
93 |
+
# /^##/:
|
94 |
+
# * save line in hold space
|
95 |
+
# * purge line
|
96 |
+
# * Loop:
|
97 |
+
# * append newline + line to hold space
|
98 |
+
# * go to next line
|
99 |
+
# * if line starts with doc comment, strip comment character off and loop
|
100 |
+
# * remove target prerequisites
|
101 |
+
# * append hold space (+ newline) to line
|
102 |
+
# * replace newline plus comments by `---`
|
103 |
+
# * print line
|
104 |
+
# Separate expressions are necessary because labels cannot be delimited by
|
105 |
+
# semicolon; see <http://stackoverflow.com/a/11799865/1968>
|
106 |
+
.PHONY: help
|
107 |
+
help:
|
108 |
+
@echo "$$(tput bold)Available rules:$$(tput sgr0)"
|
109 |
+
@echo
|
110 |
+
@sed -n -e "/^## / { \
|
111 |
+
h; \
|
112 |
+
s/.*//; \
|
113 |
+
:doc" \
|
114 |
+
-e "H; \
|
115 |
+
n; \
|
116 |
+
s/^## //; \
|
117 |
+
t doc" \
|
118 |
+
-e "s/:.*//; \
|
119 |
+
G; \
|
120 |
+
s/\\n## /---/; \
|
121 |
+
s/\\n/ /g; \
|
122 |
+
p; \
|
123 |
+
}" ${MAKEFILE_LIST} \
|
124 |
+
| LC_ALL='C' sort --ignore-case \
|
125 |
+
| awk -F '---' \
|
126 |
+
-v ncol=$$(tput cols) \
|
127 |
+
-v indent=19 \
|
128 |
+
-v col_on="$$(tput setaf 6)" \
|
129 |
+
-v col_off="$$(tput sgr0)" \
|
130 |
+
'{ \
|
131 |
+
printf "%s%*s%s ", col_on, -indent, $$1, col_off; \
|
132 |
+
n = split($$2, words, " "); \
|
133 |
+
line_length = ncol - indent; \
|
134 |
+
for (i = 1; i <= n; i++) { \
|
135 |
+
line_length -= length(words[i]) + 1; \
|
136 |
+
if (line_length <= 0) { \
|
137 |
+
line_length = ncol - indent - length(words[i]) - 1; \
|
138 |
+
printf "\n%*s ", -indent, " "; \
|
139 |
+
} \
|
140 |
+
printf "%s ", words[i]; \
|
141 |
+
} \
|
142 |
+
printf "\n"; \
|
143 |
+
}' \
|
144 |
+
| more $(shell test $(shell uname) = Darwin && echo '--no-init --raw-control-chars')
|
Procfile
ADDED
@@ -0,0 +1 @@
|
|
|
|
|
1 |
+
web: sh setup.sh && streamlit run src/app.py
|
README.md
ADDED
@@ -0,0 +1,77 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# Time series decomposition tool
|
2 |
+
|
3 |
+
Tool demonstrating time series autocorrelation analysis with Python
|
4 |
+
|
5 |
+
Assumes uploaded data is clean.
|
6 |
+
|
7 |
+
## Built With
|
8 |
+
|
9 |
+
- [Streamlit](https://streamlit.io/)
|
10 |
+
|
11 |
+
|
12 |
+
## Local setup
|
13 |
+
|
14 |
+
### Obtain the repo locally and open its root folder
|
15 |
+
|
16 |
+
#### To potentially contribute
|
17 |
+
|
18 |
+
```shell
|
19 |
+
git clone https://github.com/pkiage/tool-time-series-autocorrelation-demo
|
20 |
+
```
|
21 |
+
|
22 |
+
or
|
23 |
+
|
24 |
+
```shell
|
25 |
+
gh repo clone pkiage/tool-time-series-autocorrelation-demo
|
26 |
+
```
|
27 |
+
|
28 |
+
#### Just to deploy locally
|
29 |
+
|
30 |
+
Download ZIP
|
31 |
+
|
32 |
+
### (optional) Setup virtual environment:
|
33 |
+
|
34 |
+
```shell
|
35 |
+
python -m venv venv
|
36 |
+
```
|
37 |
+
|
38 |
+
### (optional) Activate virtual environment:
|
39 |
+
|
40 |
+
#### If using Unix based OS run the following in terminal:
|
41 |
+
|
42 |
+
```shell
|
43 |
+
.\venv\bin\activate
|
44 |
+
```
|
45 |
+
|
46 |
+
#### If using Windows run the following in terminal:
|
47 |
+
|
48 |
+
```shell
|
49 |
+
.\venv\Scripts\activate
|
50 |
+
```
|
51 |
+
|
52 |
+
### Install requirements by running the following in terminal:
|
53 |
+
|
54 |
+
#### Required packages
|
55 |
+
|
56 |
+
```shell
|
57 |
+
pip install -r requirements.txt
|
58 |
+
```
|
59 |
+
|
60 |
+
## Build and install local package
|
61 |
+
|
62 |
+
```shell
|
63 |
+
python setup.py build
|
64 |
+
```
|
65 |
+
|
66 |
+
```shell
|
67 |
+
python setup.py install
|
68 |
+
```
|
69 |
+
|
70 |
+
### Run the streamlit app (app.py) by running the following in terminal (from repository root folder):
|
71 |
+
|
72 |
+
```shell
|
73 |
+
streamlit run src/app.py
|
74 |
+
```
|
75 |
+
|
76 |
+
|
77 |
+
<p><small>Project structure based on the <a target="_blank" href="https://drivendata.github.io/cookiecutter-data-science/">cookiecutter data science project template</a>.</small></p>
|
docs/Makefile
ADDED
@@ -0,0 +1,153 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# Makefile for Sphinx documentation
|
2 |
+
#
|
3 |
+
|
4 |
+
# You can set these variables from the command line.
|
5 |
+
SPHINXOPTS =
|
6 |
+
SPHINXBUILD = sphinx-build
|
7 |
+
PAPER =
|
8 |
+
BUILDDIR = _build
|
9 |
+
|
10 |
+
# Internal variables.
|
11 |
+
PAPEROPT_a4 = -D latex_paper_size=a4
|
12 |
+
PAPEROPT_letter = -D latex_paper_size=letter
|
13 |
+
ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
|
14 |
+
# the i18n builder cannot share the environment and doctrees with the others
|
15 |
+
I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
|
16 |
+
|
17 |
+
.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext
|
18 |
+
|
19 |
+
help:
|
20 |
+
@echo "Please use \`make <target>' where <target> is one of"
|
21 |
+
@echo " html to make standalone HTML files"
|
22 |
+
@echo " dirhtml to make HTML files named index.html in directories"
|
23 |
+
@echo " singlehtml to make a single large HTML file"
|
24 |
+
@echo " pickle to make pickle files"
|
25 |
+
@echo " json to make JSON files"
|
26 |
+
@echo " htmlhelp to make HTML files and a HTML help project"
|
27 |
+
@echo " qthelp to make HTML files and a qthelp project"
|
28 |
+
@echo " devhelp to make HTML files and a Devhelp project"
|
29 |
+
@echo " epub to make an epub"
|
30 |
+
@echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
|
31 |
+
@echo " latexpdf to make LaTeX files and run them through pdflatex"
|
32 |
+
@echo " text to make text files"
|
33 |
+
@echo " man to make manual pages"
|
34 |
+
@echo " texinfo to make Texinfo files"
|
35 |
+
@echo " info to make Texinfo files and run them through makeinfo"
|
36 |
+
@echo " gettext to make PO message catalogs"
|
37 |
+
@echo " changes to make an overview of all changed/added/deprecated items"
|
38 |
+
@echo " linkcheck to check all external links for integrity"
|
39 |
+
@echo " doctest to run all doctests embedded in the documentation (if enabled)"
|
40 |
+
|
41 |
+
clean:
|
42 |
+
-rm -rf $(BUILDDIR)/*
|
43 |
+
|
44 |
+
html:
|
45 |
+
$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
|
46 |
+
@echo
|
47 |
+
@echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
|
48 |
+
|
49 |
+
dirhtml:
|
50 |
+
$(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
|
51 |
+
@echo
|
52 |
+
@echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
|
53 |
+
|
54 |
+
singlehtml:
|
55 |
+
$(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
|
56 |
+
@echo
|
57 |
+
@echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
|
58 |
+
|
59 |
+
pickle:
|
60 |
+
$(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
|
61 |
+
@echo
|
62 |
+
@echo "Build finished; now you can process the pickle files."
|
63 |
+
|
64 |
+
json:
|
65 |
+
$(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
|
66 |
+
@echo
|
67 |
+
@echo "Build finished; now you can process the JSON files."
|
68 |
+
|
69 |
+
htmlhelp:
|
70 |
+
$(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
|
71 |
+
@echo
|
72 |
+
@echo "Build finished; now you can run HTML Help Workshop with the" \
|
73 |
+
".hhp project file in $(BUILDDIR)/htmlhelp."
|
74 |
+
|
75 |
+
qthelp:
|
76 |
+
$(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
|
77 |
+
@echo
|
78 |
+
@echo "Build finished; now you can run "qcollectiongenerator" with the" \
|
79 |
+
".qhcp project file in $(BUILDDIR)/qthelp, like this:"
|
80 |
+
@echo "# qcollectiongenerator $(BUILDDIR)/qthelp/tool-time-series-autocorrelation-demo.qhcp"
|
81 |
+
@echo "To view the help file:"
|
82 |
+
@echo "# assistant -collectionFile $(BUILDDIR)/qthelp/tool-time-series-autocorrelation-demo.qhc"
|
83 |
+
|
84 |
+
devhelp:
|
85 |
+
$(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
|
86 |
+
@echo
|
87 |
+
@echo "Build finished."
|
88 |
+
@echo "To view the help file:"
|
89 |
+
@echo "# mkdir -p $$HOME/.local/share/devhelp/tool-time-series-autocorrelation-demo"
|
90 |
+
@echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/tool-time-series-autocorrelation-demo"
|
91 |
+
@echo "# devhelp"
|
92 |
+
|
93 |
+
epub:
|
94 |
+
$(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
|
95 |
+
@echo
|
96 |
+
@echo "Build finished. The epub file is in $(BUILDDIR)/epub."
|
97 |
+
|
98 |
+
latex:
|
99 |
+
$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
|
100 |
+
@echo
|
101 |
+
@echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
|
102 |
+
@echo "Run \`make' in that directory to run these through (pdf)latex" \
|
103 |
+
"(use \`make latexpdf' here to do that automatically)."
|
104 |
+
|
105 |
+
latexpdf:
|
106 |
+
$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
|
107 |
+
@echo "Running LaTeX files through pdflatex..."
|
108 |
+
$(MAKE) -C $(BUILDDIR)/latex all-pdf
|
109 |
+
@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
|
110 |
+
|
111 |
+
text:
|
112 |
+
$(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
|
113 |
+
@echo
|
114 |
+
@echo "Build finished. The text files are in $(BUILDDIR)/text."
|
115 |
+
|
116 |
+
man:
|
117 |
+
$(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
|
118 |
+
@echo
|
119 |
+
@echo "Build finished. The manual pages are in $(BUILDDIR)/man."
|
120 |
+
|
121 |
+
texinfo:
|
122 |
+
$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
|
123 |
+
@echo
|
124 |
+
@echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo."
|
125 |
+
@echo "Run \`make' in that directory to run these through makeinfo" \
|
126 |
+
"(use \`make info' here to do that automatically)."
|
127 |
+
|
128 |
+
info:
|
129 |
+
$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
|
130 |
+
@echo "Running Texinfo files through makeinfo..."
|
131 |
+
make -C $(BUILDDIR)/texinfo info
|
132 |
+
@echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo."
|
133 |
+
|
134 |
+
gettext:
|
135 |
+
$(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale
|
136 |
+
@echo
|
137 |
+
@echo "Build finished. The message catalogs are in $(BUILDDIR)/locale."
|
138 |
+
|
139 |
+
changes:
|
140 |
+
$(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
|
141 |
+
@echo
|
142 |
+
@echo "The overview file is in $(BUILDDIR)/changes."
|
143 |
+
|
144 |
+
linkcheck:
|
145 |
+
$(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
|
146 |
+
@echo
|
147 |
+
@echo "Link check complete; look for any errors in the above output " \
|
148 |
+
"or in $(BUILDDIR)/linkcheck/output.txt."
|
149 |
+
|
150 |
+
doctest:
|
151 |
+
$(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
|
152 |
+
@echo "Testing of doctests in the sources finished, look at the " \
|
153 |
+
"results in $(BUILDDIR)/doctest/output.txt."
|
docs/commands.rst
ADDED
@@ -0,0 +1,10 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
Commands
|
2 |
+
========
|
3 |
+
|
4 |
+
The Makefile contains the central entry points for common tasks related to this project.
|
5 |
+
|
6 |
+
Syncing data to S3
|
7 |
+
^^^^^^^^^^^^^^^^^^
|
8 |
+
|
9 |
+
* `make sync_data_to_s3` will use `aws s3 sync` to recursively sync files in `data/` up to `s3://[OPTIONAL] your-bucket-for-syncing-data (do not include 's3://')/data/`.
|
10 |
+
* `make sync_data_from_s3` will use `aws s3 sync` to recursively sync files from `s3://[OPTIONAL] your-bucket-for-syncing-data (do not include 's3://')/data/` to `data/`.
|
docs/conf.py
ADDED
@@ -0,0 +1,244 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# -*- coding: utf-8 -*-
|
2 |
+
#
|
3 |
+
# tool-time-series-autocorrelation-demo documentation build configuration file, created by
|
4 |
+
# sphinx-quickstart.
|
5 |
+
#
|
6 |
+
# This file is execfile()d with the current directory set to its containing dir.
|
7 |
+
#
|
8 |
+
# Note that not all possible configuration values are present in this
|
9 |
+
# autogenerated file.
|
10 |
+
#
|
11 |
+
# All configuration values have a default; values that are commented out
|
12 |
+
# serve to show the default.
|
13 |
+
|
14 |
+
import os
|
15 |
+
import sys
|
16 |
+
|
17 |
+
# If extensions (or modules to document with autodoc) are in another directory,
|
18 |
+
# add these directories to sys.path here. If the directory is relative to the
|
19 |
+
# documentation root, use os.path.abspath to make it absolute, like shown here.
|
20 |
+
# sys.path.insert(0, os.path.abspath('.'))
|
21 |
+
|
22 |
+
# -- General configuration -----------------------------------------------------
|
23 |
+
|
24 |
+
# If your documentation needs a minimal Sphinx version, state it here.
|
25 |
+
# needs_sphinx = '1.0'
|
26 |
+
|
27 |
+
# Add any Sphinx extension module names here, as strings. They can be extensions
|
28 |
+
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
|
29 |
+
extensions = []
|
30 |
+
|
31 |
+
# Add any paths that contain templates here, relative to this directory.
|
32 |
+
templates_path = ['_templates']
|
33 |
+
|
34 |
+
# The suffix of source filenames.
|
35 |
+
source_suffix = '.rst'
|
36 |
+
|
37 |
+
# The encoding of source files.
|
38 |
+
# source_encoding = 'utf-8-sig'
|
39 |
+
|
40 |
+
# The master toctree document.
|
41 |
+
master_doc = 'index'
|
42 |
+
|
43 |
+
# General information about the project.
|
44 |
+
project = u'tool-time-series-autocorrelation-demo'
|
45 |
+
|
46 |
+
# The version info for the project you're documenting, acts as replacement for
|
47 |
+
# |version| and |release|, also used in various other places throughout the
|
48 |
+
# built documents.
|
49 |
+
#
|
50 |
+
# The short X.Y version.
|
51 |
+
version = '0.1'
|
52 |
+
# The full version, including alpha/beta/rc tags.
|
53 |
+
release = '0.1'
|
54 |
+
|
55 |
+
# The language for content autogenerated by Sphinx. Refer to documentation
|
56 |
+
# for a list of supported languages.
|
57 |
+
# language = None
|
58 |
+
|
59 |
+
# There are two options for replacing |today|: either, you set today to some
|
60 |
+
# non-false value, then it is used:
|
61 |
+
# today = ''
|
62 |
+
# Else, today_fmt is used as the format for a strftime call.
|
63 |
+
# today_fmt = '%B %d, %Y'
|
64 |
+
|
65 |
+
# List of patterns, relative to source directory, that match files and
|
66 |
+
# directories to ignore when looking for source files.
|
67 |
+
exclude_patterns = ['_build']
|
68 |
+
|
69 |
+
# The reST default role (used for this markup: `text`) to use for all documents.
|
70 |
+
# default_role = None
|
71 |
+
|
72 |
+
# If true, '()' will be appended to :func: etc. cross-reference text.
|
73 |
+
# add_function_parentheses = True
|
74 |
+
|
75 |
+
# If true, the current module name will be prepended to all description
|
76 |
+
# unit titles (such as .. function::).
|
77 |
+
# add_module_names = True
|
78 |
+
|
79 |
+
# If true, sectionauthor and moduleauthor directives will be shown in the
|
80 |
+
# output. They are ignored by default.
|
81 |
+
# show_authors = False
|
82 |
+
|
83 |
+
# The name of the Pygments (syntax highlighting) style to use.
|
84 |
+
pygments_style = 'sphinx'
|
85 |
+
|
86 |
+
# A list of ignored prefixes for module index sorting.
|
87 |
+
# modindex_common_prefix = []
|
88 |
+
|
89 |
+
|
90 |
+
# -- Options for HTML output ---------------------------------------------------
|
91 |
+
|
92 |
+
# The theme to use for HTML and HTML Help pages. See the documentation for
|
93 |
+
# a list of builtin themes.
|
94 |
+
html_theme = 'default'
|
95 |
+
|
96 |
+
# Theme options are theme-specific and customize the look and feel of a theme
|
97 |
+
# further. For a list of options available for each theme, see the
|
98 |
+
# documentation.
|
99 |
+
# html_theme_options = {}
|
100 |
+
|
101 |
+
# Add any paths that contain custom themes here, relative to this directory.
|
102 |
+
# html_theme_path = []
|
103 |
+
|
104 |
+
# The name for this set of Sphinx documents. If None, it defaults to
|
105 |
+
# "<project> v<release> documentation".
|
106 |
+
# html_title = None
|
107 |
+
|
108 |
+
# A shorter title for the navigation bar. Default is the same as html_title.
|
109 |
+
# html_short_title = None
|
110 |
+
|
111 |
+
# The name of an image file (relative to this directory) to place at the top
|
112 |
+
# of the sidebar.
|
113 |
+
# html_logo = None
|
114 |
+
|
115 |
+
# The name of an image file (within the static path) to use as favicon of the
|
116 |
+
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
|
117 |
+
# pixels large.
|
118 |
+
# html_favicon = None
|
119 |
+
|
120 |
+
# Add any paths that contain custom static files (such as style sheets) here,
|
121 |
+
# relative to this directory. They are copied after the builtin static files,
|
122 |
+
# so a file named "default.css" will overwrite the builtin "default.css".
|
123 |
+
html_static_path = ['_static']
|
124 |
+
|
125 |
+
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
|
126 |
+
# using the given strftime format.
|
127 |
+
# html_last_updated_fmt = '%b %d, %Y'
|
128 |
+
|
129 |
+
# If true, SmartyPants will be used to convert quotes and dashes to
|
130 |
+
# typographically correct entities.
|
131 |
+
# html_use_smartypants = True
|
132 |
+
|
133 |
+
# Custom sidebar templates, maps document names to template names.
|
134 |
+
# html_sidebars = {}
|
135 |
+
|
136 |
+
# Additional templates that should be rendered to pages, maps page names to
|
137 |
+
# template names.
|
138 |
+
# html_additional_pages = {}
|
139 |
+
|
140 |
+
# If false, no module index is generated.
|
141 |
+
# html_domain_indices = True
|
142 |
+
|
143 |
+
# If false, no index is generated.
|
144 |
+
# html_use_index = True
|
145 |
+
|
146 |
+
# If true, the index is split into individual pages for each letter.
|
147 |
+
# html_split_index = False
|
148 |
+
|
149 |
+
# If true, links to the reST sources are added to the pages.
|
150 |
+
# html_show_sourcelink = True
|
151 |
+
|
152 |
+
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
|
153 |
+
# html_show_sphinx = True
|
154 |
+
|
155 |
+
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
|
156 |
+
# html_show_copyright = True
|
157 |
+
|
158 |
+
# If true, an OpenSearch description file will be output, and all pages will
|
159 |
+
# contain a <link> tag referring to it. The value of this option must be the
|
160 |
+
# base URL from which the finished HTML is served.
|
161 |
+
# html_use_opensearch = ''
|
162 |
+
|
163 |
+
# This is the file name suffix for HTML files (e.g. ".xhtml").
|
164 |
+
# html_file_suffix = None
|
165 |
+
|
166 |
+
# Output file base name for HTML help builder.
|
167 |
+
htmlhelp_basename = 'tool-time-series-autocorrelation-demodoc'
|
168 |
+
|
169 |
+
|
170 |
+
# -- Options for LaTeX output --------------------------------------------------
|
171 |
+
|
172 |
+
latex_elements = {
|
173 |
+
# The paper size ('letterpaper' or 'a4paper').
|
174 |
+
# 'papersize': 'letterpaper',
|
175 |
+
|
176 |
+
# The font size ('10pt', '11pt' or '12pt').
|
177 |
+
# 'pointsize': '10pt',
|
178 |
+
|
179 |
+
# Additional stuff for the LaTeX preamble.
|
180 |
+
# 'preamble': '',
|
181 |
+
}
|
182 |
+
|
183 |
+
# Grouping the document tree into LaTeX files. List of tuples
|
184 |
+
# (source start file, target name, title, author, documentclass [howto/manual]).
|
185 |
+
latex_documents = [
|
186 |
+
('index',
|
187 |
+
'tool-time-series-autocorrelation-demo.tex',
|
188 |
+
u'tool-time-series-autocorrelation-demo Documentation',
|
189 |
+
u"Author", 'manual'),
|
190 |
+
]
|
191 |
+
|
192 |
+
# The name of an image file (relative to this directory) to place at the top of
|
193 |
+
# the title page.
|
194 |
+
# latex_logo = None
|
195 |
+
|
196 |
+
# For "manual" documents, if this is true, then toplevel headings are parts,
|
197 |
+
# not chapters.
|
198 |
+
# latex_use_parts = False
|
199 |
+
|
200 |
+
# If true, show page references after internal links.
|
201 |
+
# latex_show_pagerefs = False
|
202 |
+
|
203 |
+
# If true, show URL addresses after external links.
|
204 |
+
# latex_show_urls = False
|
205 |
+
|
206 |
+
# Documents to append as an appendix to all manuals.
|
207 |
+
# latex_appendices = []
|
208 |
+
|
209 |
+
# If false, no module index is generated.
|
210 |
+
# latex_domain_indices = True
|
211 |
+
|
212 |
+
|
213 |
+
# -- Options for manual page output --------------------------------------------
|
214 |
+
|
215 |
+
# One entry per manual page. List of tuples
|
216 |
+
# (source start file, name, description, authors, manual section).
|
217 |
+
man_pages = [
|
218 |
+
('index', 'tool-time-series-autocorrelation-demo', u'tool-time-series-autocorrelation-demo Documentation',
|
219 |
+
[u"Author"], 1)
|
220 |
+
]
|
221 |
+
|
222 |
+
# If true, show URL addresses after external links.
|
223 |
+
# man_show_urls = False
|
224 |
+
|
225 |
+
|
226 |
+
# -- Options for Texinfo output ------------------------------------------------
|
227 |
+
|
228 |
+
# Grouping the document tree into Texinfo files. List of tuples
|
229 |
+
# (source start file, target name, title, author,
|
230 |
+
# dir menu entry, description, category)
|
231 |
+
texinfo_documents = [
|
232 |
+
('index', 'tool-time-series-autocorrelation-demo', u'tool-time-series-autocorrelation-demo Documentation',
|
233 |
+
u"Author", 'tool-time-series-autocorrelation-demo',
|
234 |
+
'Tool demonstrating time series autocorrelation analysis with Python', 'Miscellaneous'),
|
235 |
+
]
|
236 |
+
|
237 |
+
# Documents to append as an appendix to all manuals.
|
238 |
+
# texinfo_appendices = []
|
239 |
+
|
240 |
+
# If false, no module index is generated.
|
241 |
+
# texinfo_domain_indices = True
|
242 |
+
|
243 |
+
# How to display URL addresses: 'footnote', 'no', or 'inline'.
|
244 |
+
# texinfo_show_urls = 'footnote'
|
docs/getting-started.rst
ADDED
@@ -0,0 +1,6 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
Getting started
|
2 |
+
===============
|
3 |
+
|
4 |
+
This is where you describe how to get set up on a clean install, including the
|
5 |
+
commands necessary to get the raw data (using the `sync_data_from_s3` command,
|
6 |
+
for example), and then how to make the cleaned, final data sets.
|
docs/index.rst
ADDED
@@ -0,0 +1,24 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
.. tool-time-series-autocorrelation-demo documentation master file, created by
|
2 |
+
sphinx-quickstart.
|
3 |
+
You can adapt this file completely to your liking, but it should at least
|
4 |
+
contain the root `toctree` directive.
|
5 |
+
|
6 |
+
tool-time-series-autocorrelation-demo documentation!
|
7 |
+
====================================================
|
8 |
+
|
9 |
+
Contents:
|
10 |
+
|
11 |
+
.. toctree::
|
12 |
+
:maxdepth: 2
|
13 |
+
|
14 |
+
getting-started
|
15 |
+
commands
|
16 |
+
|
17 |
+
|
18 |
+
|
19 |
+
Indices and tables
|
20 |
+
==================
|
21 |
+
|
22 |
+
* :ref:`genindex`
|
23 |
+
* :ref:`modindex`
|
24 |
+
* :ref:`search`
|
docs/make.bat
ADDED
@@ -0,0 +1,190 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
@ECHO OFF
|
2 |
+
|
3 |
+
REM Command file for Sphinx documentation
|
4 |
+
|
5 |
+
if "%SPHINXBUILD%" == "" (
|
6 |
+
set SPHINXBUILD=sphinx-build
|
7 |
+
)
|
8 |
+
set BUILDDIR=_build
|
9 |
+
set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% .
|
10 |
+
set I18NSPHINXOPTS=%SPHINXOPTS% .
|
11 |
+
if NOT "%PAPER%" == "" (
|
12 |
+
set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS%
|
13 |
+
set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS%
|
14 |
+
)
|
15 |
+
|
16 |
+
if "%1" == "" goto help
|
17 |
+
|
18 |
+
if "%1" == "help" (
|
19 |
+
:help
|
20 |
+
echo.Please use `make ^<target^>` where ^<target^> is one of
|
21 |
+
echo. html to make standalone HTML files
|
22 |
+
echo. dirhtml to make HTML files named index.html in directories
|
23 |
+
echo. singlehtml to make a single large HTML file
|
24 |
+
echo. pickle to make pickle files
|
25 |
+
echo. json to make JSON files
|
26 |
+
echo. htmlhelp to make HTML files and a HTML help project
|
27 |
+
echo. qthelp to make HTML files and a qthelp project
|
28 |
+
echo. devhelp to make HTML files and a Devhelp project
|
29 |
+
echo. epub to make an epub
|
30 |
+
echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter
|
31 |
+
echo. text to make text files
|
32 |
+
echo. man to make manual pages
|
33 |
+
echo. texinfo to make Texinfo files
|
34 |
+
echo. gettext to make PO message catalogs
|
35 |
+
echo. changes to make an overview over all changed/added/deprecated items
|
36 |
+
echo. linkcheck to check all external links for integrity
|
37 |
+
echo. doctest to run all doctests embedded in the documentation if enabled
|
38 |
+
goto end
|
39 |
+
)
|
40 |
+
|
41 |
+
if "%1" == "clean" (
|
42 |
+
for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i
|
43 |
+
del /q /s %BUILDDIR%\*
|
44 |
+
goto end
|
45 |
+
)
|
46 |
+
|
47 |
+
if "%1" == "html" (
|
48 |
+
%SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html
|
49 |
+
if errorlevel 1 exit /b 1
|
50 |
+
echo.
|
51 |
+
echo.Build finished. The HTML pages are in %BUILDDIR%/html.
|
52 |
+
goto end
|
53 |
+
)
|
54 |
+
|
55 |
+
if "%1" == "dirhtml" (
|
56 |
+
%SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml
|
57 |
+
if errorlevel 1 exit /b 1
|
58 |
+
echo.
|
59 |
+
echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml.
|
60 |
+
goto end
|
61 |
+
)
|
62 |
+
|
63 |
+
if "%1" == "singlehtml" (
|
64 |
+
%SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml
|
65 |
+
if errorlevel 1 exit /b 1
|
66 |
+
echo.
|
67 |
+
echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml.
|
68 |
+
goto end
|
69 |
+
)
|
70 |
+
|
71 |
+
if "%1" == "pickle" (
|
72 |
+
%SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle
|
73 |
+
if errorlevel 1 exit /b 1
|
74 |
+
echo.
|
75 |
+
echo.Build finished; now you can process the pickle files.
|
76 |
+
goto end
|
77 |
+
)
|
78 |
+
|
79 |
+
if "%1" == "json" (
|
80 |
+
%SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json
|
81 |
+
if errorlevel 1 exit /b 1
|
82 |
+
echo.
|
83 |
+
echo.Build finished; now you can process the JSON files.
|
84 |
+
goto end
|
85 |
+
)
|
86 |
+
|
87 |
+
if "%1" == "htmlhelp" (
|
88 |
+
%SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp
|
89 |
+
if errorlevel 1 exit /b 1
|
90 |
+
echo.
|
91 |
+
echo.Build finished; now you can run HTML Help Workshop with the ^
|
92 |
+
.hhp project file in %BUILDDIR%/htmlhelp.
|
93 |
+
goto end
|
94 |
+
)
|
95 |
+
|
96 |
+
if "%1" == "qthelp" (
|
97 |
+
%SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp
|
98 |
+
if errorlevel 1 exit /b 1
|
99 |
+
echo.
|
100 |
+
echo.Build finished; now you can run "qcollectiongenerator" with the ^
|
101 |
+
.qhcp project file in %BUILDDIR%/qthelp, like this:
|
102 |
+
echo.^> qcollectiongenerator %BUILDDIR%\qthelp\tool-time-series-autocorrelation-demo.qhcp
|
103 |
+
echo.To view the help file:
|
104 |
+
echo.^> assistant -collectionFile %BUILDDIR%\qthelp\tool-time-series-autocorrelation-demo.ghc
|
105 |
+
goto end
|
106 |
+
)
|
107 |
+
|
108 |
+
if "%1" == "devhelp" (
|
109 |
+
%SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp
|
110 |
+
if errorlevel 1 exit /b 1
|
111 |
+
echo.
|
112 |
+
echo.Build finished.
|
113 |
+
goto end
|
114 |
+
)
|
115 |
+
|
116 |
+
if "%1" == "epub" (
|
117 |
+
%SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub
|
118 |
+
if errorlevel 1 exit /b 1
|
119 |
+
echo.
|
120 |
+
echo.Build finished. The epub file is in %BUILDDIR%/epub.
|
121 |
+
goto end
|
122 |
+
)
|
123 |
+
|
124 |
+
if "%1" == "latex" (
|
125 |
+
%SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
|
126 |
+
if errorlevel 1 exit /b 1
|
127 |
+
echo.
|
128 |
+
echo.Build finished; the LaTeX files are in %BUILDDIR%/latex.
|
129 |
+
goto end
|
130 |
+
)
|
131 |
+
|
132 |
+
if "%1" == "text" (
|
133 |
+
%SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text
|
134 |
+
if errorlevel 1 exit /b 1
|
135 |
+
echo.
|
136 |
+
echo.Build finished. The text files are in %BUILDDIR%/text.
|
137 |
+
goto end
|
138 |
+
)
|
139 |
+
|
140 |
+
if "%1" == "man" (
|
141 |
+
%SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man
|
142 |
+
if errorlevel 1 exit /b 1
|
143 |
+
echo.
|
144 |
+
echo.Build finished. The manual pages are in %BUILDDIR%/man.
|
145 |
+
goto end
|
146 |
+
)
|
147 |
+
|
148 |
+
if "%1" == "texinfo" (
|
149 |
+
%SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo
|
150 |
+
if errorlevel 1 exit /b 1
|
151 |
+
echo.
|
152 |
+
echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo.
|
153 |
+
goto end
|
154 |
+
)
|
155 |
+
|
156 |
+
if "%1" == "gettext" (
|
157 |
+
%SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale
|
158 |
+
if errorlevel 1 exit /b 1
|
159 |
+
echo.
|
160 |
+
echo.Build finished. The message catalogs are in %BUILDDIR%/locale.
|
161 |
+
goto end
|
162 |
+
)
|
163 |
+
|
164 |
+
if "%1" == "changes" (
|
165 |
+
%SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes
|
166 |
+
if errorlevel 1 exit /b 1
|
167 |
+
echo.
|
168 |
+
echo.The overview file is in %BUILDDIR%/changes.
|
169 |
+
goto end
|
170 |
+
)
|
171 |
+
|
172 |
+
if "%1" == "linkcheck" (
|
173 |
+
%SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck
|
174 |
+
if errorlevel 1 exit /b 1
|
175 |
+
echo.
|
176 |
+
echo.Link check complete; look for any errors in the above output ^
|
177 |
+
or in %BUILDDIR%/linkcheck/output.txt.
|
178 |
+
goto end
|
179 |
+
)
|
180 |
+
|
181 |
+
if "%1" == "doctest" (
|
182 |
+
%SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest
|
183 |
+
if errorlevel 1 exit /b 1
|
184 |
+
echo.
|
185 |
+
echo.Testing of doctests in the sources finished, look at the ^
|
186 |
+
results in %BUILDDIR%/doctest/output.txt.
|
187 |
+
goto end
|
188 |
+
)
|
189 |
+
|
190 |
+
:end
|
models/.gitkeep
ADDED
File without changes
|
notebooks/.gitkeep
ADDED
File without changes
|
references/.gitkeep
ADDED
File without changes
|
references/References.md
ADDED
@@ -0,0 +1,32 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# Theory & Practice
|
2 |
+
[Autocorrelation](https://en.wikipedia.org/wiki/Autocorrelation
|
3 |
+
)
|
4 |
+
|
5 |
+
[Autocorrelation Plot](https://www.itl.nist.gov/div898/handbook/eda/section3/autocopl.htm)
|
6 |
+
|
7 |
+
[Autocorrelation time series in Python](https://www.alpharithms.com/autocorrelation-time-series-python-432909/)
|
8 |
+
|
9 |
+
[Gentle introduction to autocorrelation and partial autocorrelation](https://machinelearningmastery.com/gentle-introduction-autocorrelation-partial-autocorrelation/)
|
10 |
+
|
11 |
+
[Partial autocorrelation function](https://en.wikipedia.org/wiki/Partial_autocorrelation_function)
|
12 |
+
|
13 |
+
[Partial autocorrelation plot](https://www.itl.nist.gov/div898/handbook/pmc/section4/pmc4463.htm)
|
14 |
+
|
15 |
+
[Plot_pacf, plot_acf, autocorrelation_plot and lag_plot](https://community.plotly.com/t/plot-pacf-plot-acf-autocorrelation-plot-and-lag-plot/24108/4)
|
16 |
+
|
17 |
+
[Serial Correlation](https://en.wikibooks.org/wiki/Econometric_Theory/Serial_Correlation)
|
18 |
+
|
19 |
+
[statsmodels.graphics.tsaplots.plot_acf](https://www.statsmodels.org/dev/generated/statsmodels.graphics.tsaplots.plot_acf.html)
|
20 |
+
|
21 |
+
[statsmodels.graphics.tsaplots.plot_pacf](https://www.statsmodels.org/dev/generated/statsmodels.graphics.tsaplots.plot_pacf.html)
|
22 |
+
|
23 |
+
[statsmodels.tsa.stattools.acf](https://www.statsmodels.org/dev/generated/statsmodels.tsa.stattools.acf.html)
|
24 |
+
|
25 |
+
[statsmodels.tsa.stattools.pacf.html](https://www.statsmodels.org/dev/generated/statsmodels.tsa.stattools.pacf.html)
|
26 |
+
|
27 |
+
[Time Series with Python](https://www.datacamp.com/tracks/time-series-with-python)
|
28 |
+
|
29 |
+
# Data
|
30 |
+
[NOAA Climate Data Online Search](https://www.ncdc.noaa.gov/cdo-web/search)
|
31 |
+
|
32 |
+
[Time Series with Python](https://www.datacamp.com/tracks/time-series-with-python)
|
reports/.gitkeep
ADDED
File without changes
|
reports/figures/.gitkeep
ADDED
File without changes
|
requirements.txt
ADDED
@@ -0,0 +1,11 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# local package
|
2 |
+
-e .
|
3 |
+
|
4 |
+
# external requirements
|
5 |
+
streamlit==1.9.2
|
6 |
+
pandas==1.4.2
|
7 |
+
statsmodels==0.13.2
|
8 |
+
plotly==5.8.0
|
9 |
+
matplotlib==3.5.2
|
10 |
+
# statsmodels already pinned above (duplicate entry removed)
|
11 |
+
numpy==1.22.0
|
setup.py
ADDED
@@ -0,0 +1,10 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from setuptools import find_packages, setup
|
2 |
+
|
3 |
+
setup(
|
4 |
+
name='src',
|
5 |
+
packages=find_packages(),
|
6 |
+
version='0.1.0',
|
7 |
+
description='Tool demonstrating time series autocorrelation analysis with Python',
|
8 |
+
author='Author',
|
9 |
+
license='MIT',
|
10 |
+
)
|
setup.sh
ADDED
@@ -0,0 +1,13 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
mkdir -p ~/.streamlit/
|
2 |
+
|
3 |
+
cat << EOF > ~/.streamlit/credentials.toml
|
4 |
+
[general]
|
5 |
+
email = "paul.r.kiage@gmail.com"
|
6 |
+
EOF
|
7 |
+
|
8 |
+
cat << EOF > ~/.streamlit/config.toml
|
9 |
+
[server]
|
10 |
+
headless = true
|
11 |
+
enableCORS = true
|
12 |
+
port = $PORT
|
13 |
+
EOF
|
src/__init__.py
ADDED
File without changes
|
src/app.py
ADDED
@@ -0,0 +1,125 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from data.utils import *
|
2 |
+
from features.utils import *
|
3 |
+
from visualization.visualize import *
|
4 |
+
from app_utils import *
|
5 |
+
|
6 |
+
|
7 |
+
def main():
    """Render the Streamlit demo: sample-data selection followed by
    ACF and PACF analysis/plots of the chosen series.

    Fixes the repeated user-facing typo "inverval" -> "interval" and
    collapses the duplicated ``if X:`` / ``if not X:`` pairs into
    ``if``/``else``.
    """
    st.title("Time Series Autocorrelation Demo")

    st.write("""
    Autocorrelation is the correlation of a single time series with a lag copy of itself.\n
    In the discrete case autocorrelation is also referred to as serial correlation.\n
    In general autocorrelation usually refers to the lag-one autocorrelation.
    """)

    st.title("Data")

    sample_data_selected = st.selectbox(
        'Select sample data:', data_set_options)

    data = import_sample_data(sample_data_selected, data_set_options)

    display_input_data(data)

    st.title("Time Series Autocorrelation")

    # Flatten to 1-D and drop missing values before correlation analysis.
    data_array = data.values.squeeze()
    data_series = pd.Series(data_array).dropna()

    st.header("Auto-Correlation Function (ACF)")

    st.write("""
    ACF shows the entire autocorrelation function for different lags (not just lag-one).\n
    Given the autocorrelation is a function of the lag any significant non-zero correlation imply the series can be forecast from the past.\n
    Lag 0 autocorrelation will always be 1 since the values (y-axis) are the same at the same time (x-axis) for the same time series.
    """)

    acf_type = st.radio(
        'Default ACF:', ('True', 'False'), key='acf_type')

    default_acf_selected = acf_type == 'True'

    if default_acf_selected:
        acf_array = create_standard_acf_array(data_series)
    else:
        [confidence_level,
         acf_nlags_selected,
         acf_fft_selected,
         acf_adjust_selected] = acf_settings()

        # Significance level implied by the chosen confidence interval.
        alpha_selected = (100-confidence_level)/100

        acf_array = acf(data_series,
                        alpha=alpha_selected,
                        nlags=acf_nlags_selected,
                        fft=acf_fft_selected,
                        adjusted=acf_adjust_selected)

    corr_presentation(acf_array)

    st.subheader("ACF Plot")

    if default_acf_selected:
        st.write('Given a confidence interval of 95% (significance level of 0.05) there is a 5% chance that if true autocorrelation is zero, it will fall outside blue band.')

        create_standard_corr_plot(data_series, plot_pacf=False)
    else:
        st.write(
            f'Given a confidence interval of {confidence_level}% (significance level of {alpha_selected}) there is a {alpha_selected*100}% chance that if true autocorrelation is zero, it will fall outside blue band.')

        create_acf_plot(data_series,
                        alpha_selected,
                        acf_nlags_selected,
                        acf_fft_selected)

    st.header("Partial Auto-Correlation Function (PACF)")

    st.write("""Unlike ACF, PACF controls for other lags.\n
    PACF represents how significant adding lag n is when you already have lag n-1.""")

    pacf_type = st.radio(
        'Default PACF:', ('True', 'False'), key='pacf_type')

    default_pacf_selected = pacf_type == 'True'

    if default_pacf_selected:
        pacf_array = create_standard_pacf_array(data_series)
    else:
        [confidence_level,
         pacf_nlags_selected,
         pacf_calculation_method] = pacf_settings()

        alpha_selected = (100-confidence_level)/100

        pacf_array = pacf(data_series,
                          alpha=alpha_selected,
                          nlags=pacf_nlags_selected,
                          method=pacf_calculation_method)

    corr_presentation(pacf_array)

    st.subheader("PACF Plot")

    if default_pacf_selected:
        st.write('Given a confidence interval of 95% (significance level of 0.05) there is a 5% chance that if true autocorrelation is zero, it will fall outside blue band.')
        create_standard_corr_plot(data_series, plot_pacf=True)
    else:
        st.write(
            f'Given a confidence interval of {confidence_level}% (significance level of {alpha_selected}) there is a {alpha_selected*100}% chance that if true autocorrelation is zero, it will fall outside blue band.')
        create_pacf_plot(data_series,
                         alpha_selected,
                         pacf_nlags_selected,
                         pacf_calculation_method)
|
122 |
+
|
123 |
+
|
124 |
+
# Script entry point: launch the Streamlit app.
if __name__ == "__main__":
    main()
|
src/app_utils.py
ADDED
@@ -0,0 +1,17 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import streamlit as st
|
2 |
+
from features.utils import corr_analysis
|
3 |
+
|
4 |
+
|
5 |
+
def corr_presentation(acf_array):
    """Display correlation values plus a summary of significant lags.

    *acf_array* is the (values, confint) tuple returned by statsmodels'
    acf/pacf when called with an alpha.
    """
    corr_values, sig_df, sig_count = corr_analysis(acf_array)

    with st.expander('Autocorrelations:'):
        st.write(corr_values)

    with st.expander('Significant Autocorrelations:'):
        st.metric(label='Total significant autocorrelations:',
                  value=sig_count)

        st.dataframe(sig_df)
|
src/data/.gitkeep
ADDED
File without changes
|
src/data/__init__.py
ADDED
File without changes
|
src/data/utils.py
ADDED
@@ -0,0 +1,24 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import pandas as pd
|
2 |
+
import statsmodels.api as sm
|
3 |
+
# Names shown in the sample-data selector; order matters (matched by index).
data_set_options = [
    'Earthquake time series',
    'Stationarized temperature time series',
    'Sunspots'
]


def import_sample_data(sample_data_selected, data_set_options):
    """Load the sample dataset matching *sample_data_selected*.

    Returns a date-indexed DataFrame. Raises ValueError for an
    unrecognized selection (previously the function fell through and
    crashed with UnboundLocalError on ``return data``).
    """
    if sample_data_selected == data_set_options[0]:
        data = pd.read_csv('data/processed/earthquake.csv',
                           parse_dates=['date'], index_col='date')
    elif sample_data_selected == data_set_options[1]:
        data = pd.read_csv('data/processed/stationary_temp_NY.csv',
                           parse_dates=['DATE'], index_col='DATE')
    elif sample_data_selected == data_set_options[2]:
        # The sunspots dataset ships with statsmodels; re-index by year.
        dta = sm.datasets.sunspots.load_pandas().data
        dta.index = pd.Index(sm.tsa.datetools.dates_from_range('1700', '2008'))
        del dta["YEAR"]
        data = dta
    else:
        raise ValueError(
            'Unknown sample data selection: {!r}'.format(sample_data_selected))
    return data
|
src/features/utils.py
ADDED
@@ -0,0 +1,117 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import numpy as np
|
2 |
+
from statsmodels.graphics.tsaplots import acf, pacf
|
3 |
+
import pandas as pd
|
4 |
+
import streamlit as st
|
5 |
+
|
6 |
+
|
7 |
+
def create_standard_acf_array(data_series):
    """Compute the ACF of *data_series* with the default 95% confidence band."""
    default_alpha = 0.05
    return acf(data_series, alpha=default_alpha)
|
9 |
+
|
10 |
+
|
11 |
+
def create_standard_pacf_array(data_series):
    """Compute the PACF of *data_series* with the default 95% confidence band."""
    default_alpha = 0.05
    return pacf(data_series, alpha=default_alpha)
|
13 |
+
|
14 |
+
|
15 |
+
def calculate_corr_significance_intervals(corr_array):
    """Return (lower, upper) confidence-band offsets centered on zero.

    *corr_array* is the (values, confint) tuple from statsmodels' acf/pacf;
    subtracting the point estimates recenters the band around zero.
    """
    estimates, confint = corr_array[0], corr_array[1]
    return confint[:, 0] - estimates, confint[:, 1] - estimates
|
19 |
+
|
20 |
+
|
21 |
+
def corr_significance_analysis(significance_values, acf_array):
    """Find lags whose autocorrelation exceeds the significance band.

    Returns a DataFrame of significant non-zero correlations and the
    count of such lags.
    """
    # A lag is significant when |correlation| exceeds |band threshold|.
    exceedance = np.abs(acf_array) - np.abs(significance_values)
    significant_idx = np.where(exceedance > 0)
    significant_values = acf_array[significant_idx]

    result = pd.DataFrame({
        'Lag': np.array(significant_idx).squeeze(),
        'Significant non-zero autocorrelation value': significant_values,
        'Autocorrelation absolute value': np.abs(significant_values),
    })

    return result, result.shape[0]
|
37 |
+
|
38 |
+
|
39 |
+
def corr_analysis(corr_array):
    """Break a statsmodels (values, confint) result into presentable pieces.

    Returns the correlation values, a DataFrame of significant non-zero
    correlations, and the count of such correlations.
    """
    lower_band, _upper_band = calculate_corr_significance_intervals(corr_array)
    corr_values = corr_array[0]

    # The band is symmetric around zero, so the lower bound suffices.
    sig_df, sig_count = corr_significance_analysis(lower_band, corr_values)

    return corr_values, sig_df, sig_count
|
49 |
+
|
50 |
+
|
51 |
+
def acf_settings():
    """Collect user-selected ACF parameters from an expander widget.

    Returns [confidence_interval, nlags, use_fft, adjusted] where the
    last two are booleans converted from the radio widgets' strings.
    """
    with st.expander('ACF Settings:'):
        nlags_col, confidence_col = st.columns(2)

        with nlags_col:
            acf_nlags_selected = st.number_input(
                'Number of non-zero lags:', key='acf_nlags_selected', value=1)

        with confidence_col:
            confidence_interval = st.slider(
                'Confidence interval (%)', min_value=0, max_value=99, value=95)

        adjust_col, fft_col = st.columns(2)
        with adjust_col:
            adjust_choice = st.radio(
                'Adjusted:', ('False', 'True'), key='acf_adjust_selected')

        with fft_col:
            fft_choice = st.radio(
                'Compute ACF via FFT:', ('True', 'False'), key='acf_fft_selected')

        # Radio widgets return strings; convert to booleans.
        acf_adjust_selected = adjust_choice == 'True'
        acf_fft_selected = fft_choice == 'True'

    return [confidence_interval,
            acf_nlags_selected,
            acf_fft_selected,
            acf_adjust_selected]
|
81 |
+
|
82 |
+
|
83 |
+
# Method identifiers accepted by statsmodels' pacf (see its docs).
pacf_calculation_methods = [
    # Yule-Walker with sample-size adjustment in denominator for acovf. Default.
    'yw', 'ywadjusted',
    # Yule-Walker without adjustment. Default.
    'ywm', 'ywmle',
    # regression of time series on lags of it and on constant.
    'ols',
    # regression of time series on lags using a single common sample to estimate all pacf coefficients.
    'ols-inefficient',
    # regression of time series on lags with a bias adjustment.
    'ols-adjusted',
    # Levinson-Durbin recursion with bias correction.
    'ld', 'ldadjusted',
    # Levinson-Durbin recursion without bias correction.
    'ldb', 'ldbiased']


def pacf_settings():
    """Collect user-selected PACF parameters from an expander widget.

    Returns [confidence_interval, nlags, calculation_method].
    """
    with st.expander('PACF Settings:'):
        nlags_col, confidence_col = st.columns(2)

        with nlags_col:
            pacf_nlags_selected = st.number_input(
                'Number of non-zero lags:', key='ppacf_nlags_selected', value=1)

        with confidence_col:
            confidence_interval = st.slider(
                'Confidence interval (%)', min_value=0, max_value=99, value=95, key='pacf_confidence_selected_col')

        pacf_calculation_method = st.selectbox(
            label='Method for calculation', options=pacf_calculation_methods)

    return [confidence_interval,
            pacf_nlags_selected,
            pacf_calculation_method]
|
src/visualization/.gitkeep
ADDED
File without changes
|
src/visualization/__init__.py
ADDED
File without changes
|
src/visualization/visualize.py
ADDED
@@ -0,0 +1,192 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import pandas as pd
|
2 |
+
import streamlit as st
|
3 |
+
import plotly.express as px
|
4 |
+
from statsmodels.graphics.tsaplots import plot_acf, plot_pacf
|
5 |
+
from statsmodels.graphics.tsaplots import acf, pacf
|
6 |
+
import numpy as np
|
7 |
+
import plotly.graph_objects as go
|
8 |
+
|
9 |
+
|
10 |
+
def streamlit_2columns_metrics_df_shape(df: pd.DataFrame):
    """Show the dataframe's row and column counts as side-by-side metrics."""
    rows_col, cols_col = st.columns(2)

    with rows_col:
        st.metric(
            label="Rows",
            value=df.shape[0],
            delta=None,
            delta_color="normal",
        )

    with cols_col:
        st.metric(
            label="Columns",
            value=df.shape[1],
            delta=None,
            delta_color="normal",
        )
|
31 |
+
|
32 |
+
|
33 |
+
def show_inputted_dataframe(data):
    """Render the raw input dataframe plus its shape inside an expander."""
    with st.expander("Input Dataframe (X and y)"):
        st.dataframe(data)
        streamlit_2columns_metrics_df_shape(data)
|
37 |
+
|
38 |
+
|
39 |
+
def time_series_line_plot(data):
    """Plot the series as a Plotly line chart sized to the container."""
    st.plotly_chart(px.line(data), use_container_width=True)
|
44 |
+
|
45 |
+
|
46 |
+
def time_series_scatter_plot(data):
    """Scatter plot of the series with an OLS trendline overlaid."""
    st.plotly_chart(px.scatter(data, trendline="ols"),
                    use_container_width=True)
|
49 |
+
|
50 |
+
|
51 |
+
def time_series_box_plot(data):
    """Box plot of the series, with all underlying points shown."""
    st.plotly_chart(px.box(data, points="all"), use_container_width=True)
|
54 |
+
|
55 |
+
|
56 |
+
def streamlit_chart_setting_height_width(
    title: str,
    default_widthvalue: int,
    default_heightvalue: int,
    widthkey: str,
    heightkey: str,
):
    """Collect chart width and height (in inches) via two number inputs.

    Returns (width_size, height_size).
    """
    with st.expander(title):
        width_col, height_col = st.columns(2)

        with width_col:
            width_size = st.number_input(
                label="Width in inches:",
                value=default_widthvalue,
                key=widthkey,
            )

        with height_col:
            height_size = st.number_input(
                label="Height in inches:",
                value=default_heightvalue,
                key=heightkey,
            )
    return width_size, height_size
|
81 |
+
|
82 |
+
|
83 |
+
# zero 0-lag autocorrelation = True
|
84 |
+
# fft
|
85 |
+
|
86 |
+
|
87 |
+
def streamlit_autocorrelation_plot_settings():
    """Collect lag count, alpha, and 0-lag inclusion for an ACF plot.

    Returns [lags, alpha, include_zero_lag].
    """
    with st.expander('Autocorrelation Plot Settings:'):
        lag_col, alpha_col = st.columns(2)

        with lag_col:
            lags_selected = st.number_input(
                label="Lags:",
                value=15)

        with alpha_col:
            alpha_selected = st.number_input(
                label="Alpha:",
                value=0.05)

        zero_choice = st.radio(
            label="Include the 0-lag autocorrelation:",
            options=('True', 'False'))

        # Radio widgets return strings; convert to a boolean.
        zero_include_selected = zero_choice == 'True'

    return [lags_selected,
            alpha_selected,
            zero_include_selected]
|
110 |
+
|
111 |
+
|
112 |
+
def display_input_data(data):
    """Show the input dataframe along with box and line plots."""
    show_inputted_dataframe(data)

    with st.expander("Box plot"):
        time_series_box_plot(data)

    with st.expander("Line Plot"):
        time_series_line_plot(data)
|
120 |
+
|
121 |
+
|
122 |
+
def streamlit_acf_plot_settings():
    """Ask whether the ACF should be computed via FFT; return a boolean."""
    fft_choice = st.radio(
        label="Compute the ACF via FFT:",
        options=('False', 'True'))
    # Radio widgets return strings; convert to a boolean.
    return fft_choice == 'True'
|
130 |
+
|
131 |
+
|
132 |
+
def plotly_corr(corr_array, upper_y, lower_y):
    """Build a stem-style correlation plot with a shaded confidence band.

    Trace order matters: the invisible upper-bound trace must precede the
    lower-bound trace so that fill='tonexty' shades between them.
    """
    values = corr_array[0]
    lags = np.arange(len(values))

    fig = go.Figure()
    # Vertical stems from zero up to each correlation value.
    for lag in range(len(values)):
        fig.add_scatter(x=(lag, lag), y=(0, values[lag]),
                        mode='lines', line_color='#3f3f3f')
    # Markers at the correlation values.
    fig.add_scatter(x=lags, y=values, mode='markers',
                    marker_color='#1f77b4', marker_size=12)
    # Invisible upper bound, then lower bound filled up to it.
    fig.add_scatter(x=lags, y=upper_y, mode='lines',
                    line_color='rgba(255,255,255,0)')
    fig.add_scatter(x=lags, y=lower_y, mode='lines',
                    fillcolor='rgba(32, 146, 230,0.3)',
                    fill='tonexty', line_color='rgba(255,255,255,0)')
    fig.update_traces(showlegend=False)
    fig.update_yaxes(zerolinecolor='#000000')
    return fig
|
145 |
+
|
146 |
+
|
147 |
+
def create_standard_corr_plot(series, plot_pacf=False):
    """Plot the ACF (or the PACF when plot_pacf=True) with a 95% band.

    NOTE: the ``plot_pacf`` flag shadows statsmodels' ``plot_pacf`` import
    inside this function; the name is kept for caller compatibility.
    """
    cleaned = series.dropna()
    if plot_pacf:
        corr_array = pacf(cleaned, alpha=0.05)
    else:
        corr_array = acf(cleaned, alpha=0.05)

    # Re-center the confidence interval around zero.
    lower_y = corr_array[1][:, 0] - corr_array[0]
    upper_y = corr_array[1][:, 1] - corr_array[0]

    fig = plotly_corr(corr_array, upper_y, lower_y)
    fig.update_layout(
        title='Partial Autocorrelation' if plot_pacf else 'Autocorrelation')
    st.plotly_chart(fig, use_container_width=True)
|
158 |
+
|
159 |
+
|
160 |
+
def create_acf_plot(data_series,
                    alpha_selected,
                    acf_nlags_selected_plot,
                    acf_fft_selected_plot):
    """Plot the ACF with user-selected alpha, lag count, and FFT option."""
    corr_array = acf(data_series,
                     alpha=alpha_selected,
                     nlags=acf_nlags_selected_plot,
                     fft=acf_fft_selected_plot)

    # Re-center the confidence interval around zero.
    band_lower = corr_array[1][:, 0] - corr_array[0]
    band_upper = corr_array[1][:, 1] - corr_array[0]

    fig = plotly_corr(corr_array, band_upper, band_lower)
    fig.update_layout(title='Autocorrelation')
    st.plotly_chart(fig, use_container_width=True)
|
176 |
+
|
177 |
+
|
178 |
+
def create_pacf_plot(data_series,
                     alpha_selected,
                     acf_nlags_selected,
                     pacf_calculation_method):
    """Plot the PACF with user-selected alpha, lag count, and method."""
    corr_array = pacf(data_series,
                      alpha=alpha_selected,
                      nlags=acf_nlags_selected,
                      method=pacf_calculation_method)

    # Re-center the confidence interval around zero.
    band_lower = corr_array[1][:, 0] - corr_array[0]
    band_upper = corr_array[1][:, 1] - corr_array[0]

    fig = plotly_corr(corr_array, band_upper, band_lower)
    fig.update_layout(title='Partial Autocorrelation')
    st.plotly_chart(fig, use_container_width=True)
|
test_environment.py
ADDED
@@ -0,0 +1,25 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import sys
|
2 |
+
|
3 |
+
# Interpreter family this project expects ("python" = 2.x, "python3" = 3.x).
REQUIRED_PYTHON = "python3"


def main():
    """Verify the running interpreter matches REQUIRED_PYTHON.

    Raises ValueError for an unrecognized REQUIRED_PYTHON string and
    TypeError when the interpreter's major version does not match.
    """
    system_major = sys.version_info.major
    if REQUIRED_PYTHON == "python":
        required_major = 2
    elif REQUIRED_PYTHON == "python3":
        required_major = 3
    else:
        raise ValueError("Unrecognized python interpreter: {}".format(
            REQUIRED_PYTHON))

    if system_major != required_major:
        raise TypeError(
            "This project requires Python {}. Found: Python {}".format(
                required_major, sys.version))
    print(">>> Development environment passes all tests!")
|
22 |
+
|
23 |
+
|
24 |
+
# Script entry point: run the environment check.
if __name__ == '__main__':
    main()
|
tox.ini
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
[flake8]
|
2 |
+
max-line-length = 79
|
3 |
+
max-complexity = 10
|