% custom_macros.tex
% (scraped page header preserved as comments so the file remains valid LaTeX)
% Download
% latex 382 lines 10.7 KB
  1% custom_macros.tex
  2% Demonstrates custom commands, environments, and macro creation
  3% Shows how to create reusable LaTeX components for efficient document writing
  4
  5\documentclass[12pt,a4paper]{article}
  6
  7\usepackage{amsmath, amssymb, amsthm}
  8\usepackage{xcolor}
  9\usepackage{tcolorbox}
 10\usepackage[margin=1in]{geometry}
 11\usepackage{tikz}
 12\usepackage{enumitem}
 13
 14% ==================== Custom Math Commands ====================
 15
 16% Vector notation
 17\newcommand{\vect}[1]{\mathbf{#1}}
 18\newcommand{\uvect}[1]{\hat{\mathbf{#1}}}  % Unit vector
 19
 20% Matrix notation
 21\newcommand{\mat}[1]{\mathbf{#1}}
 22\newcommand{\trans}[1]{#1^{\mathsf{T}}}  % Transpose
 23\newcommand{\inv}[1]{#1^{-1}}  % Inverse
 24
 25% Common sets
 26\newcommand{\R}{\mathbb{R}}  % Real numbers
 27\newcommand{\C}{\mathbb{C}}  % Complex numbers
 28\newcommand{\N}{\mathbb{N}}  % Natural numbers
 29\newcommand{\Z}{\mathbb{Z}}  % Integers
 30\newcommand{\Q}{\mathbb{Q}}  % Rational numbers
 31
 32% Probability and statistics
 33\newcommand{\E}[1]{\mathbb{E}\left[#1\right]}  % Expectation
 34\newcommand{\Var}[1]{\text{Var}\left[#1\right]}  % Variance
 35\newcommand{\Cov}[2]{\text{Cov}\left[#1, #2\right]}  % Covariance
 36\newcommand{\Prob}[1]{\mathbb{P}\left(#1\right)}  % Probability
 37
 38% Calculus operators
 39\newcommand{\diff}[2]{\frac{d#1}{d#2}}  % Derivative
 40\newcommand{\pdiff}[2]{\frac{\partial #1}{\partial #2}}  % Partial derivative
 41\newcommand{\grad}{\nabla}  % Gradient
 42\newcommand{\divg}{\nabla \cdot}  % Divergence
 43\newcommand{\curl}{\nabla \times}  % Curl
 44
 45% Norms
 46\newcommand{\norm}[1]{\left\lVert #1 \right\rVert}  % General norm
 47\newcommand{\abs}[1]{\left| #1 \right|}  % Absolute value
 48\newcommand{\inner}[2]{\langle #1, #2 \rangle}  % Inner product
 49
 50% Machine learning specific
 51\newcommand{\loss}{\mathcal{L}}  % Loss function
 52\newcommand{\hypothesis}{\mathcal{H}}  % Hypothesis space
 53\newcommand{\weights}{\vect{w}}  % Weight vector
 54\newcommand{\features}{\vect{x}}  % Feature vector
 55\newcommand{\labels}{\vect{y}}  % Label vector
 56
 57% Argmax and argmin
 58\DeclareMathOperator*{\argmax}{arg\,max}
 59\DeclareMathOperator*{\argmin}{arg\,min}
 60
 61% ==================== Custom Commands with Optional Arguments ====================
 62
 63% Define a command with default and optional values
 64\newcommand{\derivative}[3][1]{%
 65    \ifnum#1=1
 66        \frac{d#2}{d#3}%
 67    \else
 68        \frac{d^{#1}#2}{d#3^{#1}}%
 69    \fi
 70}
 71
 72% Matrix with size specification
 73\newcommand{\bigmat}[2][r]{%
 74    \begin{bmatrix}
 75        #2
 76    \end{bmatrix}_{#1}
 77}
 78
 79% ==================== Custom Environments ====================
 80
 81% Theorem-like environments
 82\newtheorem{theorem}{Theorem}[section]
 83\newtheorem{lemma}[theorem]{Lemma}
 84\newtheorem{proposition}[theorem]{Proposition}
 85\newtheorem{corollary}[theorem]{Corollary}
 86
 87\theoremstyle{definition}
 88\newtheorem{definition}[theorem]{Definition}
 89\newtheorem{example}[theorem]{Example}
 90
 91\theoremstyle{remark}
 92\newtheorem{remark}[theorem]{Remark}
 93
 94% Custom colored boxes
 95\newtcolorbox{notebox}{
 96    colback=blue!5!white,
 97    colframe=blue!75!black,
 98    title=Note,
 99    fonttitle=\bfseries
100}
101
102\newtcolorbox{warningbox}{
103    colback=red!5!white,
104    colframe=red!75!black,
105    title=Warning,
106    fonttitle=\bfseries
107}
108
109\newtcolorbox{tipbox}{
110    colback=green!5!white,
111    colframe=green!75!black,
112    title=Tip,
113    fonttitle=\bfseries
114}
115
% Custom proof-like environment with a "Solution" heading
\newenvironment{solution}
    {\begin{proof}[Solution]}
    {\end{proof}}

% Numbered algorithm environment; the optional argument is the title.
\newcounter{algorithm}
\newenvironment{algorithm}[1][]{%
    \refstepcounter{algorithm}%
    \begin{tcolorbox}[
        colback=yellow!10!white,
        colframe=orange!75!black,
        % Braces protect the title value: without them a comma or "=" in #1
        % would be swallowed by the key-value parser.
        title={Algorithm \thealgorithm: #1},
        fonttitle=\bfseries
    ]
}{%
    \end{tcolorbox}
}

% ==================== Renewing Existing Commands ====================

% Redefine the vec command to use bold instead of arrow
\renewcommand{\vec}[1]{\mathbf{#1}}

% Number equations per section (1.1, 1.2, ..., 2.1, ...). \numberwithin
% (amsmath) both formats the number as <section>.<equation> and RESETS the
% counter at each new section; the previous plain
% \renewcommand{\theequation}{\thesection.\arabic{equation}} only changed
% the format, so counters kept increasing across sections.
\numberwithin{equation}{section}

% ==================== Document Content ====================

\title{Custom \LaTeX\ Commands and Environments}
\author{LaTeX Student}
\date{\today}

\begin{document}

\maketitle

\tableofcontents

\section{Custom Math Commands}

\subsection{Vector and Matrix Notation}

Using custom vector commands makes equations cleaner and easier to modify:

\begin{equation}
    \vect{v} = \begin{bmatrix} v_1 \\ v_2 \\ v_3 \end{bmatrix}, \quad
    \uvect{n} = \frac{\vect{v}}{\norm{\vect{v}}}
\end{equation}

Matrix operations become more readable:

\begin{equation}
    \mat{A}\vect{x} = \vect{b}, \quad
    \vect{x} = \inv{\mat{A}}\vect{b}, \quad
    \mat{B} = \trans{\mat{A}}\mat{A}
\end{equation}

\subsection{Common Mathematical Sets}

The custom set commands simplify notation:

\begin{itemize}
    \item Real numbers: $\R$, $\R^n$
    \item Complex numbers: $\C$
    \item Integers: $\Z$
    \item Natural numbers: $\N$
    \item Rational numbers: $\Q$
\end{itemize}

\subsection{Probability and Statistics}

Custom probability commands ensure consistent notation:

\begin{align}
    \E{X} &= \sum_{i} x_i p(x_i) \\
    \Var{X} &= \E{X^2} - \left(\E{X}\right)^2 \\
    \Cov{X}{Y} &= \E{XY} - \E{X}\E{Y} \\
    \Prob{X = x} &= p(x)
\end{align}

\subsection{Calculus Operators}

\begin{example}[Derivatives and Gradients]
First derivative: $\diff{f}{x}$

Partial derivative: $\pdiff{f}{x}$

Gradient: $\grad f = \begin{bmatrix} \pdiff{f}{x_1} \\ \pdiff{f}{x_2} \\ \vdots \\ \pdiff{f}{x_n} \end{bmatrix}$

Divergence: $\divg \vect{F} = \pdiff{F_x}{x} + \pdiff{F_y}{y} + \pdiff{F_z}{z}$

Curl: $\curl \vect{F} = \grad \times \vect{F}$
\end{example}

\subsection{Norms and Inner Products}

\begin{equation}
    \norm{\vect{x}}_2 = \sqrt{\sum_{i=1}^n x_i^2}, \quad
    \abs{x}, \quad
    \inner{\vect{u}}{\vect{v}} = \sum_{i=1}^n u_i v_i
\end{equation}

\section{Machine Learning Notation}

Using custom ML commands ensures consistency across documents:

\begin{theorem}[Linear Regression]
The optimal weights for linear regression minimize:
\begin{equation}
    \weights^* = \argmin_{\weights} \loss(\weights) = \argmin_{\weights} \sum_{i=1}^n (y_i - \trans{\weights}\features_i)^2
\end{equation}
\end{theorem}

\begin{solution}
Taking the gradient and setting it to zero:
\begin{align}
    \grad_{\weights} \loss(\weights) &= 0 \\
    \weights^* &= \inv{(\trans{\mat{X}}\mat{X})}\trans{\mat{X}}\labels
\end{align}
\end{solution}

\section{Commands with Optional Arguments}

\subsection{Higher-Order Derivatives}

The \texttt{derivative} command supports optional arguments for higher-order derivatives:

\begin{itemize}
    \item First derivative: $\derivative{f}{x}$
    \item Second derivative: $\derivative[2]{f}{x}$
    \item Third derivative: $\derivative[3]{f}{x}$
    \item $n$-th derivative: $\derivative[n]{f}{x}$
\end{itemize}

\section{Custom Environments}

\subsection{Theorem Environments}

\begin{theorem}[Pythagorean Theorem]
In a right-angled triangle, the square of the hypotenuse equals the sum of squares of the other two sides:
\begin{equation}
    c^2 = a^2 + b^2
\end{equation}
\end{theorem}

\begin{proof}
This can be proven using similar triangles or area calculations.
\end{proof}

\begin{lemma}
If $f$ is differentiable at $x = a$, then $f$ is continuous at $x = a$.
\end{lemma}

\begin{definition}[Convex Function]
A function $f: \R^n \to \R$ is convex if for all $\vect{x}, \vect{y} \in \R^n$ and $\lambda \in [0,1]$:
\begin{equation}
    f(\lambda\vect{x} + (1-\lambda)\vect{y}) \leq \lambda f(\vect{x}) + (1-\lambda)f(\vect{y})
\end{equation}
\end{definition}

\subsection{Colored Boxes}

\begin{notebox}
Custom environments like this can highlight important information and make documents more readable.
\end{notebox}

\begin{warningbox}
Be careful when redefining existing commands. Always check that your new definition doesn't break existing functionality.
\end{warningbox}

\begin{tipbox}
Use \texttt{\textbackslash newcommand} for new commands and \texttt{\textbackslash renewcommand} to modify existing ones. Use \texttt{\textbackslash providecommand} if you want to define a command only if it doesn't already exist.
\end{tipbox}

\subsection{Algorithm Environment}

\begin{algorithm}[Gradient Descent]
\textbf{Input:} Initial point $\weights_0$, learning rate $\eta$, tolerance $\epsilon$

\textbf{Output:} Optimal weights $\weights^*$

\begin{enumerate}[label=\arabic*.]
    \item Initialize $k = 0$
    \item \textbf{while} $\norm{\grad \loss(\weights_k)} > \epsilon$ \textbf{do}
    \begin{enumerate}[label=\alph*.]
        \item Compute gradient: $\vect{g}_k = \grad \loss(\weights_k)$
        \item Update weights: $\weights_{k+1} = \weights_k - \eta \vect{g}_k$
        \item $k \leftarrow k + 1$
    \end{enumerate}
    \item \textbf{end while}
    \item \textbf{return} $\weights_k$
\end{enumerate}
\end{algorithm}

\section{Advanced Examples}

\subsection{Neural Network Forward Pass}

Using our custom commands, the forward pass equations become cleaner:

\begin{align}
    \vect{h}_1 &= \sigma(\mat{W}_1\features + \vect{b}_1) \\
    \vect{h}_2 &= \sigma(\mat{W}_2\vect{h}_1 + \vect{b}_2) \\
    \vect{y} &= \text{softmax}(\mat{W}_3\vect{h}_2 + \vect{b}_3)
\end{align}

where $\sigma$ is the activation function.

\subsection{Optimization Problem}

\begin{equation}
    \min_{\weights \in \R^d} \left\{ \loss(\weights) = \E{\ell(f_{\weights}(\features), y)} + \lambda\norm{\weights}^2 \right\}
\end{equation}

The optimal solution satisfies:
\begin{equation}
    \weights^* = \argmin_{\weights} \left[ \loss(\weights) \right]
\end{equation}

\section{Best Practices}

\begin{enumerate}
    \item \textbf{Consistency:} Use custom commands to ensure consistent notation throughout your document
    \item \textbf{Maintainability:} If you need to change notation, you only need to modify the command definition
    \item \textbf{Readability:} Well-named commands make your \LaTeX\ source more readable
    \item \textbf{Reusability:} Save common commands in a separate \texttt{.sty} file for reuse across projects
    \item \textbf{Documentation:} Comment your custom commands to remember their purpose
\end{enumerate}

\begin{warningbox}
Avoid redefining standard \LaTeX\ commands unless absolutely necessary, as this can lead to compatibility issues with packages.
\end{warningbox}

\section{Creating Your Own Style File}

To reuse these commands across multiple documents, save them in a file called \texttt{mycommands.sty}:

\begin{verbatim}
\NeedsTeXFormat{LaTeX2e}
\ProvidesPackage{mycommands}[2024/01/01 Custom Commands]

% Include all your \newcommand and \newenvironment definitions here

\endinput
\end{verbatim}

Then in your main document:

\begin{verbatim}
\documentclass{article}
\usepackage{mycommands}
\begin{document}
...
\end{document}
\end{verbatim}

\section{Compilation Notes}

To compile this document:
\begin{verbatim}
pdflatex custom_macros.tex
pdflatex custom_macros.tex  # Run twice for cross-references
\end{verbatim}

\end{document}