From 6acbf824d9db4f014fcebb32e5733d18e9dd304c Mon Sep 17 00:00:00 2001
From: shawk masboob <masboob.shawk@gmail.com>
Date: Thu, 20 Feb 2020 11:34:01 -0500
Subject: [PATCH] "switching files into different folders, renaming folders,
 simple updates to everything.

---
 .DS_Store                                     | Bin 0 -> 8196 bytes
 .Rhistory                                     |   0
 Models/.DS_Store                              | Bin 0 -> 6148 bytes
 ...raph_Theory_ShawkMasboob-checkpoint.ipynb} |   0
 ...ROJECT_Auto_Documentation-checkpoint.ipynb |   6 +
 Models/0214-PROJECT_Auto_Documentation.ipynb  | 157 ++++++++++++++++++
 Models/0214-PROJECT_Auto_Documentation.py     |  93 -----------
 ...-PROJECT_Auto_Documentation.cpython-37.pyc | Bin 0 -> 2050 bytes
 .../__pycache__/TDA_Regression.cpython-37.pyc | Bin 0 -> 2033 bytes
 README.md                                     |  36 +++-
 {doc => Reports}/.DS_Store                    | Bin 6148 -> 6148 bytes
 .../0117-PROJECT_Proposal.ipynb               |   0
 .../0124-REPORT-Optimization.ipynb            |   0
 Reports/0207-REPORT-Graph_Theory.ipynb        | 100 +++++++++++
 Reports/0221-REPORT-ABM.ipynb                 |  82 +++++++++
 {doc => Reports}/images/TDAscikit.png         | Bin
 {Models => Reports/images}/complex.png        | Bin
 {Models => Reports/images}/simplex.png        | Bin
 Topological_Machine_Learning/.DS_Store        | Bin 6148 -> 6148 bytes
 .../__pycache__/__init__.cpython-37.pyc       | Bin 0 -> 259 bytes
 20 files changed, 373 insertions(+), 101 deletions(-)
 create mode 100644 .DS_Store
 create mode 100644 .Rhistory
 create mode 100644 Models/.DS_Store
 rename Models/{0207-REPORT-Graph_Theory_ShawkMasboob.ipynb => .ipynb_checkpoints/0207-REPORT-Graph_Theory_ShawkMasboob-checkpoint.ipynb} (100%)
 create mode 100644 Models/.ipynb_checkpoints/0214-PROJECT_Auto_Documentation-checkpoint.ipynb
 create mode 100644 Models/0214-PROJECT_Auto_Documentation.ipynb
 delete mode 100644 Models/0214-PROJECT_Auto_Documentation.py
 create mode 100644 Models/__pycache__/0214-PROJECT_Auto_Documentation.cpython-37.pyc
 create mode 100644 Models/__pycache__/TDA_Regression.cpython-37.pyc
 rename {doc => Reports}/.DS_Store (98%)
 rename doc/0117-PROJECT_Proposal_Template.ipynb => Reports/0117-PROJECT_Proposal.ipynb (100%)
 rename Models/0124-REPORT-Optimization_ShawkMasboob.ipynb => Reports/0124-REPORT-Optimization.ipynb (100%)
 create mode 100644 Reports/0207-REPORT-Graph_Theory.ipynb
 create mode 100644 Reports/0221-REPORT-ABM.ipynb
 rename {doc => Reports}/images/TDAscikit.png (100%)
 rename {Models => Reports/images}/complex.png (100%)
 rename {Models => Reports/images}/simplex.png (100%)
 create mode 100644 Topological_Machine_Learning/__pycache__/__init__.cpython-37.pyc

diff --git a/.DS_Store b/.DS_Store
new file mode 100644
index 0000000000000000000000000000000000000000..106eb59f4485db07cba0073733956842b640812f
GIT binary patch
literal 8196
zcmeHMF^|(Q6n?&zURs3ca1tFDvaoe%=!xnEq+BTj5=e-gU;xxhE~Sl>#8Hz{alNAa
z2p0G^jQjw^a06mtXJ+9!Hg{>7f|M($0$Z}b7yCUs{+?s^xd1@A6I%ml0Dyv-Vf``|
zIf=Eiyp*;?%?=_#d$6WqV7b)7=MPKTvO+Q-8ITM}1|$QLflXil&uo^I73V&0m8E1r
zGVotA!1f0fGeccq%rV(Iut-7x#4=VV3FSHy5LJP?z?dVpU?SWUk(&|`Vi0bQ^{UX*
z1;!k?IS>(i5HT_l5ei90$Ntqs9f;0RmXZO<z%m0Yb}xes6pkQF$M06)I$_jmeIi?>
z@}<hQqSTbymD+P-M5jh%Mw75>j-Rrphh{XggnU4$dnTLKV`nh!*LUtvKQbMEFiwme
zdw?p>o;bcur(HVn?J()b;0=mWRjU1ZV>auwwWg}w+?zMmS*O!#s=Mu5^LbUdc0;>;
z@6da7JUf}ce#_@X#J&JmGjn{Gp5RMk?7P$-Vja`3zyqAP4Fhoh-N`RZ(j98X5%q!;
zNek^B?e)YBj`ID@*N>HN@AtDrdDQ-`PB8lQI2lK5!y|M;fX*00v~jM`_HZ4xceM+z
z!^|{uoRQH;yTdDeJ{9g*(mHp@YONfXWctZC<vJXoLzq)s%tLT+i!f2{<5y#%Ji;0u
z-PVWuSeocMKdBjR*fz2@_Z222$I*rRN-v4v#pxHZBree3`Qlkee>uyn@VjLeWNOor
z;u;tz@je<X{=d8W{(lW4$u1-Vl7UTSfRuYiuZwT@^EY(%MOmz^Vm`#o64Q%0CMH-c
tTm<0r*&l`|tEkFsbb&EPtiklxF9M2(5rxk&dHxssAt}%Q<P-TL#SiY|3Y-7{

literal 0
HcmV?d00001

diff --git a/.Rhistory b/.Rhistory
new file mode 100644
index 0000000..e69de29
diff --git a/Models/.DS_Store b/Models/.DS_Store
new file mode 100644
index 0000000000000000000000000000000000000000..5008ddfcf53c02e82d7eee2e57c38e5672ef89f6
GIT binary patch
literal 6148
zcmeH~Jr2S!425mzP>H1@V-^m;4Wg<&0T*E43hX&L&p$$qDprKhvt+--jT7}7np#A3
zem<@ulZcFPQ@L2!n>{z**<q8>++&mCkOWA81W14cNZ<zv;LbK1Poaz?KmsK2CSc!(
z0ynLxE!0092;Krf2c+FF_Fe*7ECH>lEfg7;MkzE(HCqgga^y>{tEnwC%0;vJ&^%eQ
zLs35+`xjp>T0<F0fCPF1$Cyrb|F7^5{eNG?83~ZUUlGt@xh*qZDeu<Z%US-OSsOPv
j)R!Z4KLME7ReXlK;d!wEw5GODWMKRea10D2@KpjYNUI8I

literal 0
HcmV?d00001

diff --git a/Models/0207-REPORT-Graph_Theory_ShawkMasboob.ipynb b/Models/.ipynb_checkpoints/0207-REPORT-Graph_Theory_ShawkMasboob-checkpoint.ipynb
similarity index 100%
rename from Models/0207-REPORT-Graph_Theory_ShawkMasboob.ipynb
rename to Models/.ipynb_checkpoints/0207-REPORT-Graph_Theory_ShawkMasboob-checkpoint.ipynb
diff --git a/Models/.ipynb_checkpoints/0214-PROJECT_Auto_Documentation-checkpoint.ipynb b/Models/.ipynb_checkpoints/0214-PROJECT_Auto_Documentation-checkpoint.ipynb
new file mode 100644
index 0000000..2fd6442
--- /dev/null
+++ b/Models/.ipynb_checkpoints/0214-PROJECT_Auto_Documentation-checkpoint.ipynb
@@ -0,0 +1,6 @@
+{
+ "cells": [],
+ "metadata": {},
+ "nbformat": 4,
+ "nbformat_minor": 2
+}
diff --git a/Models/0214-PROJECT_Auto_Documentation.ipynb b/Models/0214-PROJECT_Auto_Documentation.ipynb
new file mode 100644
index 0000000..4587eb4
--- /dev/null
+++ b/Models/0214-PROJECT_Auto_Documentation.ipynb
@@ -0,0 +1,157 @@
+{
+ "cells": [
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "# <center>Stub Functions and Automatic Documentation</center>\n",
+    "\n",
+    "<img src=\"https://pdoc3.github.io/pdoc/logo.png\" width=30% alt=\"Sphinx logo\">"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 1,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "from sklearn import datasets\n",
+    "\n",
+    "def dataload():\n",
+    "    \"\"\"\n",
+    "    upload toy datasets from scikit-learn\n",
+    "    \"\"\"\n",
+    "    data = None\n",
+    "    return data\n",
+    "\n",
+    "def datafetch(file_name):\n",
+    "    \"\"\"\n",
+    "    upload real world datasets from scikit-learn\n",
+    "    \"\"\"\n",
+    "    data = None\n",
+    "    print(\"reading data from:\", file_name)\n",
+    "    return data\n",
+    "\n",
+    "def descriptive_statistic(df):\n",
+    "    \"\"\"\n",
+    "    Provides brief descriptive statistics on dataset. \n",
+    "    Takes dataframe as input.\n",
+    "    \"\"\"\n",
+    "    print(\"Type : \", None, \"\\n\\n\")\n",
+    "    print(\"Shape : \", None)\n",
+    "    print(\"Head -- \\n\", None)\n",
+    "    print(\"\\n\\n Tail -- \\n\", None)\n",
+    "    print(\"Describe : \", None)\n",
+    "    \n",
+    "def model_selection(df):\n",
+    "    \"\"\"\n",
+    "    Takes dateframe as input. Performs foward/backward stepwise\n",
+    "    regression. Returns best model for both methods.\n",
+    "    \"\"\"\n",
+    "    null_fit = None\n",
+    "    foward_step = None\n",
+    "    backward_step = None\n",
+    "    return foward_step, backward_step\n",
+    "\n",
+    "def MSE_fit(fit): \n",
+    "    \"\"\"\n",
+    "    Takes in a fitted model as the input.\n",
+    "    Calculates the MSU of the fitted model.\n",
+    "    Outputs the model's MSE.\n",
+    "    \"\"\"\n",
+    "    MSE = None\n",
+    "    return MSE\n",
+    "\n",
+    "def accuracy_metrics(fit, MSE):\n",
+    "    \"\"\"\n",
+    "    This function is used for model validation. It returns a dictionary\n",
+    "    of several regression model accuracy metrics. Its inputs are a fitted model\n",
+    "    and the MSE of the fitted model.\n",
+    "    \"\"\"\n",
+    "    d = dict()\n",
+    "    sumObj = None\n",
+    "    SSE = None\n",
+    "    n = None\n",
+    "    p = None\n",
+    "    pr = None\n",
+    "    d['R2'] = None\n",
+    "    d['R2ad'] = None\n",
+    "    d['AIC'] = None\n",
+    "    d['BIC'] = None\n",
+    "    d['PRESS'] = None\n",
+    "    d['Cp']= None\n",
+    "    return d"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 3,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Help on function accuracy_metrics in module __main__:\n",
+      "\n",
+      "accuracy_metrics(fit, MSE)\n",
+      "    This function is used for model validation. It returns a dictionary\n",
+      "    of several regression model accuracy metrics. Its inputs are a fitted model\n",
+      "    and the MSE of the fitted model.\n",
+      "\n"
+     ]
+    }
+   ],
+   "source": [
+    "# test docstring\n",
+    "help(accuracy_metrics)"
+   ]
+  },
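+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Beyond `help()`, these docstrings can also be rendered as standalone HTML by an automatic documentation tool such as pdoc. The cell below is only an illustrative sketch: it assumes the stub functions above are saved in a module named `auto_documentation.py` (a hypothetical file name) and that the `pdoc3` package is installed."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Illustrative sketch only: render HTML documentation from the docstrings.\n",
+    "# Assumes the stubs are saved as auto_documentation.py (hypothetical name)\n",
+    "# and that the pdoc3 package is installed (pip install pdoc3).\n",
+    "!pdoc --html --output-dir docs auto_documentation"
+   ]
+  },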
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# test code\n",
+    "\n",
+    "file_name = 'data.csv'\n",
+    "\n",
+    "a = datafetch(file_name)\n",
+    "print(a)\n",
+    "\n",
+    "b = descriptive_statistic(a)\n",
+    "print(b)\n",
+    "\n",
+    "c = model_selection(a)\n",
+    "print(c)\n",
+    "\n",
+    "d = MSE_fit(c)\n",
+    "print(d)\n",
+    "\n",
+    "print(accuracy_metrics(c, d))"
+   ]
+  }
+ ],
+ "metadata": {
+  "kernelspec": {
+   "display_name": "Python 3",
+   "language": "python",
+   "name": "python3"
+  },
+  "language_info": {
+   "codemirror_mode": {
+    "name": "ipython",
+    "version": 3
+   },
+   "file_extension": ".py",
+   "mimetype": "text/x-python",
+   "name": "python",
+   "nbconvert_exporter": "python",
+   "pygments_lexer": "ipython3",
+   "version": "3.7.3"
+  }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 2
+}
diff --git a/Models/0214-PROJECT_Auto_Documentation.py b/Models/0214-PROJECT_Auto_Documentation.py
deleted file mode 100644
index 81bd176..0000000
--- a/Models/0214-PROJECT_Auto_Documentation.py
+++ /dev/null
@@ -1,93 +0,0 @@
-# importing toy & real world datasets from the scikit-learn library
-
-from sklearn import datasets
-
-def dataload():
-    """
-    upload toy datasets from scikit-learn
-    """
-    data = None
-    return data
-
-def datafetch(file_name):
-    """
-    upload real world datasets from scikit-learn
-    """
-    data = None
-    print("reading data from:", file_name)
-    return data
-
-# standard descriptive statistic analysis of data    
-
-def descriptive_statistic(df):
-    """
-    Provides brief descriptive statistics on dataset. 
-    Takes dataframe as input.
-    """
-    print("Type : ", None, "\n\n")
-    print("Shape : ", None)
-    print("Head -- \n", None)
-    print("\n\n Tail -- \n", None)
-    print("Describe : ", None)
-    
-    
-# model selection
-
-def model_selection(df):
-    """
-    Takes dateframe as input. Performs foward/backward stepwise
-    regression. Returns best model for both methods.
-    """
-    null_fit = None
-    foward_step = None
-    backward_step = None
-    return foward_step, backward_step
-
-# model accuracy 
-
-def MSE_fit(fit): 
-    """
-    Takes in a fitted model as the input.
-    Calculates the MSU of the fitted model.
-    Outputs the model's MSE.
-    """
-    MSE = None
-    return MSE
-
-def accuracy_metrics(fit, MSE):
-    """
-    This function is used for model validation. It returns a dictionary
-    of several regression model accuracy metrics. Its inputs are a fitted model
-    and the MSE of the fitted model.
-    """
-    d = dict()
-    sumObj = None
-    SSE = None
-    n = None
-    p = None
-    pr = None
-    d['R2'] = None
-    d['R2ad'] = None
-    d['AIC'] = None
-    d['BIC'] = None
-    d['PRESS'] = None
-    d['Cp']= None
-    return d
-
-# test code
-
-file_name = 'data.csv'
-
-a = datafetch(file_name)
-print(a)
-
-b = descriptive_statistic(a)
-print(b)
-
-c = model_selection(a)
-print(c)
-
-d = MSE_fit(c)
-print(d)
-
-print(accuracy_metrics(c, d))
\ No newline at end of file
diff --git a/Models/__pycache__/0214-PROJECT_Auto_Documentation.cpython-37.pyc b/Models/__pycache__/0214-PROJECT_Auto_Documentation.cpython-37.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..07bd442147a3b679ee435f0f44f744bb0192657e
GIT binary patch
literal 2050
zcmah~!EWO=5EUiamSsEHF1mZ@i9ib!J|x}-MbR#TqPy7?O@VcSID65d1})ODYFQFU
zDrp4wlzu_~qQBA4(6y)jLQkC;If)aXC=GIEBn~z2%?$l^FbEwO?9=am8h0G$FEs9k
z56X8i&F`>CN3zsev4}B8y3+g2iCpQ+7D!LFWe21$1K9=HlA-K@Y|FkJfb7VfC{UgX
zZdue-q3Yhk8Olc)^}z0t-6FdWcAxAv%4h)gfb39pR3CH~Ex9{!hR^<nxSS!Y11YR9
z%9<D-;9#411kwbsNcO9<cEHK4Mj?k$7HKXdxA_gf^X7A%uegbmWn%YIC3HqR$A92A
zeB7lyIEt2kW}^>AX)`hlalKp#Gt2YY=$$f4n-?RPd757)u}G&Y5igQVP3gcSyBxjG
zrAp1{)xj5E?wy^V{&;+JF@00oeEKer%azKkut}cn7dH(q46}tWFddHXGu;LmBSIW5
zOpJ<<J_iMmG7BkMJGU-#)(kLv)mPh?wGt`6&UGsP7kc#^HcN<_b`n9afs9t6lgtj?
zx;sx&HO<6IX$<dyAB`c`T_kO;Y`o}$X)tg^5Hj5b?H&VSL3hINH7F22!y>!&80x-s
z5esOod#!!1bs9~&fHbHgLVBk2t3)cpXF5@H4qB{}!X{UW8|2s6BsM(H?g-fD<aQyJ
z;6p)cz{G{&Nmi70pWarTi<?66*L+;LVORx|g&?!){s1KLy*(aQ{V?PgB1s#o2L%Bi
zo}tr_X=pGkC0EX$67^y$!s&g4FJZr-{%rMuH2movJi8|jNy7pl;ywlilMmB{>8;(h
zH%C(3s(Or0%^#!VXG+g=y)t~BUkfcqGZ8OQ0kTwaoft*_T3u>o3>GIpSGLp{@T-jF
zE39bn)qIxQ1z#z<$fapW8xz!_uMtNSWM!I8=ZTGa&8GmYDx$&N2g!U&&MuulH7Zpx
zw#H|05;e8c@)`a#R@Xf#NC;LGk+SK1Mt$H`zibhaWL$6v#VWZ$0O__1wVm=Kk;Y{z
zEK~))c|ZBU^Es*8jt$b&(n6^;8!6(?4LBS>qQR{_aI8Ie{uE^go*^@@5FIAgX#=ud
zkTD{{fnYqCglr6ob#9#|*^qD_W(#H;W=FEME8RmV2$0@J`WxBW$o58d4xM$=J%2X*
zED|V%G9$J*=%oRUFuzTvuSA+alTaa?Sgtp<Ah=A(Md%wk2dFaYN@=)m9%|=~>o_j8
zh;Oi3G&JKrKLPHj+YA>Pdhbzg=sc0Zg-vNV{_ix5Blr9u^3M;1jJ!7|N0Ijyme$$%
z@njOYM+M#G5a%$^1E5gvz)~|AwN1G?o&6knlgV+-vYHjOTj-}*dL}k;Pd9OY1McP%
zSe$m7im@rlfIa;53wT@6pVWSAuExWisAHD&f1ok7bfMFYNZn1<Oo=B)jajdmpxdKn
tvzo>FBlN;b*jnhq4GtkgWvaf#mxjaAcY{t420KAN=msx+7s7fp{{<*)>kt3{

literal 0
HcmV?d00001

diff --git a/Models/__pycache__/TDA_Regression.cpython-37.pyc b/Models/__pycache__/TDA_Regression.cpython-37.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..45a527c8e108885616b6980d4be3ffc01d6e9fdc
GIT binary patch
literal 2033
zcmah~!EWO=5EUuemK8hMF1mZ@i9ib!J|tf3p=cLD(cNr{?qR)YoW1B!gBIynwJZrF
zoiu`bO243g(ckE2=-N|%p{LG_9LEVzlm<C75{H`iW`=&#??)aC_WAcejXIw97aDgX
z0OdQF_IFs6r&#7KS<INHxblDVVy*(!0_m%^>VOPXsJb9qDpGqO+p4GfAUkS54z;ht
zTNZb9q`S9ph7M51dtmR8-6FdOc8}~f%D4}9pX^9?bPsfnmO2=FgC~DOT;71yp^{Eo
z?QDV%aInoh0ck^66#La%d*H;YVI*Lz%B+w|xZ+0Kd5f7Tmcl0KB6WwEmL{j2qd)K)
z0q!zB97W4NKMp@yZS2s_<@I7I?X)PS!?)TlTu}~T7FltbCNi5WWin55J)r~B{Brod
zP&%{2i?^>Q=larUYtte>DsLJX7*z|SM)tiRV5SW+K|nY-Obmj6Jp~1TF$Wo0d$*i<
zYX%Vg>Z@IdM$1fG7ba8xi?n(Qn-v5}JBg21fW1|kG<O4DcV}s)C%If|gW=uNVKC&n
ziyY0gOXfW=Ee4JVB4)avZ80DgbSDg7g97n0tf^a{p&lrXSU_uSwf0u)G@9aoG^}Dm
zdS;5NRB0=wCe<?mT4K`DrB_;5CtYe?npjcfcLW>>a=VZV@S&g$U=q@bG%r`~h~8G6
zi<?r5S7KE0D5}EoT#{LJe*lui;h~7CUKEK7nP!c(2l<B&Ptj?>4D=IL4p+0sL_OPy
zaI%f?IqbL8j;$V$hCj0h&$h%NX;=V6+^3*m@?mnA{+h4-8Ir=Q>LEHkdx%b)X)`O#
z(u!GeEsYvZWwJm8$kOF?YBl+r`{ESm+O13u{A%mO5(^l7jhGg0E|%KO3uPP9Mg(;b
z7{n2W`6|mMv(&|V&8GmYF5~{)2g!U)&H<f2u{zU<gVy*0PNE+6S^>kK!Rp$Qf`nj2
z5h<HiXVeG0`elcRG#64pC{C*l0!X);>)n)}$Shf9(m_?=oA=|7qL`7o>)0UuuyRl;
z%|?p&OA8LC4`|@E5014D&mW`gz%yj#1){^mdTl^<05U;DI1r2vlaP%-ao(-hBpVVQ
zz-+;6!|W)w=87LfL4fo(GT6x0Mz%MybL_2~?)kIfXP!bSta4&ofWET85$3na^p(s~
zXcD}o#5*U<rWT}7DY-~<L+1chR$plY*Uf$H+;N>GD<hK|tQG^!h$n5j%?fFt_a5Yi
z&XYOZ&y<GK|4zdw=I1Zt;QXajvH$wrN$kIYrFC|GIv&UTq@=4G;v9sg4-}exSZb!?
zwp}ehOn;93@%Xf6dCkh2m*#Pnp2$tylTF;;fV=$+7O&l=Vr)t>WcNSw4Bl4spLCSi
ztI=RT?$`xg{{~Y_7dqXD)ZJ9glz4J9nDv@Tx;<((tyy9|K`*R?orNyk@E9_*%JjGR
Y(h695JnV!~xF7byZumUl5Y~hFFXQ0loB#j-

literal 0
HcmV?d00001

diff --git a/README.md b/README.md
index 07176de..1885508 100644
--- a/README.md
+++ b/README.md
@@ -1,13 +1,33 @@
+
+# Topological Machine Learning
+
 The first 1/3 of this project will require the use of traditional machine learning methods on a dataset. The purpose of this portion of the project is to promote the benefits of TDA: it shows data scientists that they can achieve very interesting results when using TDA. The second portion of this project will incorporate TDA with machine learning. This portion of the project will be the most demanding due to a lack of references. I will attempt to perform both a classification-type project and a prediction-type project. Additionally, I may have to rely on HPCC to run my script (TDA is computationally expensive). The final portion of the project will be creating a Jupyter demo project for data scientists as well as a short semi-theoretical document that discusses the more important features of TDA.
 
 I hope to create a script that is easy for those who are not familiar with TDA to follow. Ideally, I would like to create a document that scikit-tda is willing to publish. Hence, the document needs to be constructed in a manner that is easy to interpret by anyone who is new to TDA.
 
-Thankfully, python is already has the libraries that I will need. They are as follows:
+Thankfully, Python already has the libraries that I will need: scipy, numpy, matplotlib, pandas, seaborn, and scikit-tda.
+
+## Getting Started
+
+These instructions will get you a copy of the project up and running on your local machine for development and testing purposes.
+
+### Prerequisites
+
+You may need to install the Python TDA library, scikit-tda. It can be installed from PyPI with a single command, shown below. Some users may also need to install seaborn, pandas, or numpy.
+
+```
+pip install scikit-tda
+```
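+
+To verify the installation, a quick import check like the one below should run without errors (an illustrative sketch; scikit-tda bundles packages such as ripser and persim):
+
+```
+# Quick sanity check that the main scikit-tda components import correctly.
+import ripser, persim
+print("scikit-tda is ready")
+```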
+
+## Authors
+
+* **Shawk Masboob**
+
+
+## License
+
+* UPDATE
+
+## Acknowledgments
 
-scipy
-numpy
-matplotlib
-pandas
-seaborn
-sklearn
-scikit-tda
\ No newline at end of file
+* UPDATE
\ No newline at end of file
diff --git a/doc/.DS_Store b/Reports/.DS_Store
similarity index 98%
rename from doc/.DS_Store
rename to Reports/.DS_Store
index 6e27a44abf0d397be0acd102067cfe58e9169201..96e99f845f70dddd9ce0e5ed953f3bd46058b856 100644
GIT binary patch
delta 16
XcmZoMXffEZnu*!a(s1)SrZ7<eFUtio

delta 16
XcmZoMXffEZnu*!O!g%vKrZ7<eFX#m}

diff --git a/doc/0117-PROJECT_Proposal_Template.ipynb b/Reports/0117-PROJECT_Proposal.ipynb
similarity index 100%
rename from doc/0117-PROJECT_Proposal_Template.ipynb
rename to Reports/0117-PROJECT_Proposal.ipynb
diff --git a/Models/0124-REPORT-Optimization_ShawkMasboob.ipynb b/Reports/0124-REPORT-Optimization.ipynb
similarity index 100%
rename from Models/0124-REPORT-Optimization_ShawkMasboob.ipynb
rename to Reports/0124-REPORT-Optimization.ipynb
diff --git a/Reports/0207-REPORT-Graph_Theory.ipynb b/Reports/0207-REPORT-Graph_Theory.ipynb
new file mode 100644
index 0000000..9cc6b8c
--- /dev/null
+++ b/Reports/0207-REPORT-Graph_Theory.ipynb
@@ -0,0 +1,100 @@
+{
+ "cells": [
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "# <center>Graph Theory within Topological Data Analysis</center>\n",
+    "\n",
+    "<center>by Shawk Masboob</center>"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Graph theory utilizes limited topological thought (i.e. it can be considered a subset of topology). Topological objects consist of nodes, edges, and faces. In graph theory, these objects arise through natural geo-spatial data (cell tower data, election data, etc.) or through artificial construction (social complexity theory, political relations theory, etc.). Some application of graph theory involve using graphs modeling relations and processes in physical, biological, and social and information systems. Graph theory is also used to represent computer networks. \n",
+    "\n",
+    "Graph theory and topology are related in that graphs are a 1-simplicial complex. A simplicial complex is a finite collection of simplices. [1]. A simplex is a generalized triangle in any arbitrary dimension where the 0-simplex is a point, the 1-simplex is a line segment, the 2-simplex is a triangle, the 3-simplex is a tetrahedron, the 4-simplex is a 5-cell, and so on. The figure below provides a simple example of simplices."
+   ]
+  },
+  {
+   "attachments": {},
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "| ![alt text](simplex.png \"simplex\") | \n",
+    "|:--:| \n",
+    "|__Figure 1.__ From left to right: a point (vertex), a line segment (an edge), a triangle, and a tetrahedron.\n",
+    "Image Source: [1] |"
+   ]
+  },
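+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "As a small illustrative sketch (assuming the networkx package is available), the vertex and edge lists of a graph can be read off directly as its 0-simplices and 1-simplices:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Illustrative sketch: a graph is a 1-simplicial complex, with vertices as\n",
+    "# 0-simplices and edges as 1-simplices. Assumes networkx is installed.\n",
+    "import networkx as nx\n",
+    "\n",
+    "G = nx.Graph()\n",
+    "G.add_edges_from([(0, 1), (1, 2), (2, 0), (2, 3)])\n",
+    "\n",
+    "print('0-simplices (vertices):', list(G.nodes()))\n",
+    "print('1-simplices (edges):', list(G.edges()))"
+   ]
+  },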
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "The shift from the 1-simplicial complex and the 2-simplicial complex provides a simple demonstration of the relationship between graph theory and topology. It is challenging to discuss concepts within topology without implicitly referencing properties of graph theory. \n",
+    "\n",
+    "As mentioned above, there are many applications within graph theory. These same applications can be done using topology, specifically topological data analysis (TDA). TDA seeks to analyze datasets using various techniques from topology. It is beneficial to convert the data points with simplicial complexes based on some distance metric. A simple way of converting data points into a global object is to use each point as the vertices of a graph and to let the edges of the graph be determined by proximity. [2]. The graph at this stage (1-simplex), while capturing connectivity within the data, fails to capture higher order features beyond clustering. [2]. Moving up the “simplicial scales” allows the researcher to see other interesting features that may not be so obvious when using a standard graph. The Cech Complex and the Rips Complex are the two most common methods used to “fill in” the higher dimensional simplices. [2] Figure 2 demonstrates the process of converting a dataset into a higher order simplicial complex. "
+   ]
+  },
+  {
+   "attachments": {},
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "| ![alt text](complex.png \"complex\") | \n",
+    "|:--:| \n",
+    "|__Figure 2.__ The bottom left image is the Cech Complex and the bottom right image is the Rips Complex.\n",
+    "Image Source: [2] |"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "As seen from Figure 2, any point cloud data can be converted into a higher order simplicial complex after selecting a specified proximity measure. \n",
+    "\n",
+    "A real-world example that combines graph theory and TDA is as follows: the human brain can be visualized using graph theory. However, the human brain is a very complex network that is hard to visualize. MAPPER (a Python TDA package) can be used to reduce the high-dimensional dataset without having to make many assumptions about the data. [3]. Doing so allows the researcher to visualize the data using some graph. \n",
+    "\n",
+    "TDA is used to understand complex and high dimensional data problems. The simple technique mentioned above is capable to pointing out the natural patterns within a dataset that a standard graph cannot. Graph theory is useful if one seeks construct a network. If one seeks to go beyond network construction and clustering, they ought to consider using TDA. That said, TDA is not independent of graph theory. There are some theoretical similarities and applications. As noted above, a graph is a 1-simplicial complex. "
+   ]
+  },
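+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "As a brief, illustrative sketch of the Rips construction described above (not part of the report's analysis), the following cell computes persistent homology for a small synthetic point cloud. It assumes the `ripser` and `persim` packages from scikit-tda are installed."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Illustrative sketch: persistent homology of a noisy circle via the Rips construction.\n",
+    "# Assumes the scikit-tda packages ripser and persim are installed.\n",
+    "import numpy as np\n",
+    "from ripser import ripser\n",
+    "from persim import plot_diagrams\n",
+    "\n",
+    "# Sample points from a noisy circle; its loop should appear as a long-lived H1 feature.\n",
+    "theta = np.random.uniform(0, 2 * np.pi, 150)\n",
+    "points = np.column_stack([np.cos(theta), np.sin(theta)])\n",
+    "points += np.random.normal(scale=0.05, size=points.shape)\n",
+    "\n",
+    "# Build the Vietoris-Rips filtration and compute the H0 and H1 persistence diagrams.\n",
+    "diagrams = ripser(points)['dgms']\n",
+    "plot_diagrams(diagrams, show=True)"
+   ]
+  },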
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "---\n",
+    "# References\n",
+    "\n",
+    "[1] Edelsbrunner, Herbert. “Simplicial Complexes.” COMPUTATIONAL TOPOLOGY, www2.cs.duke.edu/courses/fall06/cps296.1/.\n",
+    "\n",
+    "[2] Ghrist, Robert. “Barcodes: The Persistent Topology of Data.” Bulletin of the American Mathematical Society, vol. 45, no. 01, 2007, pp. 61–76., doi:10.1090/s0273-0979-07-01191-3.\n",
+    "\n",
+    "[3] Saggar, M., Sporns, O., Gonzalez-Castillo, J. et al. Towards a new approach to reveal dynamical organization of the brain using topological data analysis. Nat Commun 9, 1399 (2018). https://doi.org/10.1038/s41467-018-03664-4"
+   ]
+  }
+ ],
+ "metadata": {
+  "anaconda-cloud": {},
+  "kernelspec": {
+   "display_name": "Python 3",
+   "language": "python",
+   "name": "python3"
+  },
+  "language_info": {
+   "codemirror_mode": {
+    "name": "ipython",
+    "version": 3
+   },
+   "file_extension": ".py",
+   "mimetype": "text/x-python",
+   "name": "python",
+   "nbconvert_exporter": "python",
+   "pygments_lexer": "ipython3",
+   "version": "3.7.3"
+  }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 1
+}
diff --git a/Reports/0221-REPORT-ABM.ipynb b/Reports/0221-REPORT-ABM.ipynb
new file mode 100644
index 0000000..5b5fd84
--- /dev/null
+++ b/Reports/0221-REPORT-ABM.ipynb
@@ -0,0 +1,82 @@
+{
+ "cells": [
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "In order to successfully complete this assignment you need to commit this report to your project git repository on or before **11:59pm on Friday February 21**."
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "# <center>Using Agent Based Models in #your research area here#</center>\n",
+    "\n",
+    "<center>by #Your name#</center>"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "---\n",
+    "# Instructions\n",
+    "\n",
+    "Write a ~1-2 page report about how Agent Based Models (ABMs) could be used in your area of research.  There are three basic forms to this report:\n",
+    "\n",
+    "1. ABMs are already used in your area of research.  Provide a summary and include references.  If possible include a short example that could be demonstrated in class. \n",
+    "2. Describe how you could incorporate ABMs into your research domain.  What research questions might you be able to ask, improve or validate?\n",
+    "3. ABMs can not be used in your area of research.  Explain in detail why they can't be used.  \n",
+    "\n",
+    "To be clear, I think 1 and 2 will be the most common reports.  If you choose 3 you need to make a very convincing and thought out argument. \n",
+    "\n",
+    "You can write your report in any tool you want. I recommend Jupyter notebooks but Word, Latex, Google docs are all fine. If you use Jupyter notebooks (which I tend to like) make sure you do not include these instructions. Write the report so someone outside of the course could make sense of how ODEs can be used.\n",
+    "\n",
+    "**NOTE:** Make sure you remove all instructions from your reports.  Make them readable so someone outside of the course could make sense of them.\n"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "---\n",
+    "# References\n",
+    "\n",
+    "In all three example provide some references to papers you found that help illustrate your argument. I prefer references with links to papers.  \n"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "-----\n",
+    "### Congratulations, you are done!\n",
+    "\n",
+    "Now, you just need to commit and push this report to your project git repository. "
+   ]
+  }
+ ],
+ "metadata": {
+  "anaconda-cloud": {},
+  "kernelspec": {
+   "display_name": "Python 3",
+   "language": "python",
+   "name": "python3"
+  },
+  "language_info": {
+   "codemirror_mode": {
+    "name": "ipython",
+    "version": 3
+   },
+   "file_extension": ".py",
+   "mimetype": "text/x-python",
+   "name": "python",
+   "nbconvert_exporter": "python",
+   "pygments_lexer": "ipython3",
+   "version": "3.7.3"
+  }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 1
+}
diff --git a/doc/images/TDAscikit.png b/Reports/images/TDAscikit.png
similarity index 100%
rename from doc/images/TDAscikit.png
rename to Reports/images/TDAscikit.png
diff --git a/Models/complex.png b/Reports/images/complex.png
similarity index 100%
rename from Models/complex.png
rename to Reports/images/complex.png
diff --git a/Models/simplex.png b/Reports/images/simplex.png
similarity index 100%
rename from Models/simplex.png
rename to Reports/images/simplex.png
diff --git a/Topological_Machine_Learning/.DS_Store b/Topological_Machine_Learning/.DS_Store
index d626a404444af1ca378c8e6d5ccff55076c3097d..7196b2f952ea476a0da897cb355f6bf9b0581816 100644
GIT binary patch
delta 67
zcmZoMXfc=|#>B`mu~2NHo+2a5#DLw5ER%VdHgE1{u3*``fcXOBW_AvK4xp0F6Pdp=
VPv#e~<X`{-Mg|6^%>g1?m;rMx5a9p-

delta 231
zcmZoMXfc=|#>B)qu~2NHo+2a1#DLw4FEBDPa!lr7+$_n@5X_LzP|8rmkj#+E5YOPo
zkjan(WED?VV3e%KDa)Y8P{2@`oA2V1l#`#tz`($v)|hL^{_&tAkWElm3X!_lAWqr5
mjBz5%W_AvK4xl$SA7uQ_Jegm_l7o?f0f-nFHV24oVFmy<bTlde

diff --git a/Topological_Machine_Learning/__pycache__/__init__.cpython-37.pyc b/Topological_Machine_Learning/__pycache__/__init__.cpython-37.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..1cdb03ac8b5d5281d83237dfc0207ee9fc9ba3dd
GIT binary patch
literal 259
zcmZ?b<>g`k0`1#|aUMYWF^B^LtU!hX5ElypiFAfch7^Vr#uTPGEQ|~(%)tzrEH7Dr
z5-*v6geJ=^*5uTjoZ={s+|-i9l*E$6D2}|OwEUvn#F8jJ7%M(EGcUi$ZzV$!NGF2$
zm8c(DoLW?@U!0Lxo}HUmoRpuRr0<eioL!P%pby0PIr-_C$%#4fxrxacnR%)4IjM<7
xd6{|X`XNY?zEDXYm}Go>W?p7Ve7s&k<t+}I-29Z%oK!oIdyCnC1QR0&0s#BTNE!eD

literal 0
HcmV?d00001

-- 
GitLab