\documentclass[MASTER.tex]{subfiles}
\begin{document}

%=======================================================================%
\begin{frame}
\huge
\[ \mbox{Machine Learning with Python} \]
\end{frame}
%=======================================================================%
\begin{frame}
\Large
\textbf{Machine Learning}
\begin{itemize}
\item Machine Learning is a discipline involving algorithms designed to find patterns in and make predictions about data.
\item It is nearly ubiquitous in our world today, and used in everything from web searches to financial forecasts to studies of the nature of the Universe.
\item This workshop will cover an introduction to scikit-learn, a python machine learning package, and to the central concepts of Machine Learning.
\end{itemize}
\end{frame}
%=======================================================================%
\begin{frame}
\Large \textbf{Machine Learning}\\
We will introduce the basic categories of learning problems and how to implement them using scikit-learn.
% From this foundation, we will explore practical examples of machine learning using real-world data, from handwriting analysis to automated classification of astronomical images.
\begin{itemize}
\item Regression : Predicting Numeric Values
\item Classification : Predicting Categories
\item Clustering : assigning instances to groups.
\end{itemize}
\end{frame}
%=======================================================================%
\begin{frame}[fragile]
\textbf{Getting ready}
The datasets in scikit-learn are contained within the datasets module. Use the following
command to import these datasets:
\begin{framed}
\begin{verbatim}
>>> from sklearn import datasets
>>> import numpy as np
\end{verbatim}
\end{framed}
\end{frame}
%======================================================================== %
\begin{frame}
\begin{figure}
\centering
\includegraphics[width=1.2\linewidth]{images/SKLsite}
\end{figure}
\end{frame}
%=======================================================================%
\begin{frame}
\LARGE
\textbf{Classification}
\begin{itemize}
\item \textbf{Description:} Identifying to which category an object belongs to.
\item \textbf{Applications:} Spam detection, Image recognition.
\item \textbf{Algorithms:} SVM, nearest neighbors, random forest,
\end{itemize}
\end{frame}
%=======================================================================%
\begin{frame}
\LARGE
\textbf{Regression}
\begin{itemize}
\item \textbf{Description:} Predicting a continuous-valued attribute associated with an object.
\item \textbf{Applications:} Drug response, Stock prices.
\item \textbf{Algorithms:} SVR, ridge regression, Lasso,
\end{itemize}
\end{frame}
%=======================================================================%
\begin{frame}
\LARGE
\textbf{Clustering}\\
Automatic grouping of similar objects into sets.
\begin{description}
\item[Applications:] Customer segmentation, Grouping experiment outcomes
\item[Algorithms:] k-Means, spectral clustering, mean-shift, ...
\end{description}
\end{frame}
%=======================================================================%
\begin{frame}
\LARGE
\textbf{Dimensionality Reduction}\\
\begin{itemize}
\item \textbf{Description:} Reducing the number of random variables to consider.
\item \textbf{Applications:} Visualization, Increased efficiency
\item \textbf{Algorithms:} PCA, feature selection, non-negative matrix factorization.
\end{itemize}
\end{frame}
%=======================================================================%
\begin{frame}
\LARGE
\textbf{Model selection}\\
\begin{itemize}
\item \textbf{Description:} Comparing, validating and choosing parameters and models.
\item \textbf{Goal:} Improved accuracy via parameter tuning
\item \textbf{Modules:} grid search, cross validation, metrics
\end{itemize}
\end{frame}
%=======================================================================%
\begin{frame}
\LARGE
\textbf{Preprocessing}\\
\begin{itemize}
\item \textbf{Description:} Feature extraction and normalization.
\item \textbf{Application:} Transforming input data such as text for use with machine learning algorithms.
\item \textbf{Modules:} preprocessing, feature extraction.
\end{itemize}
\end{frame}

\end{document}