- <!DOCTYPE html>
- <html lang="en">
- <head>
- <meta charset="UTF-8">
- <meta name="viewport" content="width=device-width, initial-scale=1.0">
- <title>LOG // X-LINGUAL ALIGNMENT // EN-HI // PROCRUSTES</title>
- <style>
- /* --- Caves of Qud Inspired Terminal Style --- */
- /* Color Palette Variables (Based on Code list) */
- :root {
- --coq-r: #a64a2e; /* dark red */
- --coq-R: #d74200; /* red */
- --coq-o: #f15f22; /* dark orange */
- --coq-O: #e99f10; /* orange */
- --coq-w: #98875f; /* brown */
- --coq-W: #cfc041; /* gold/yellow */
- --coq-g: #009403; /* dark green */
- --coq-G: #00c420; /* green */
- --coq-b: #0048bd; /* dark blue */
- --coq-B: #0096ff; /* blue */
- --coq-c: #40a4b9; /* dark cyan */
- --coq-C: #77bfcf; /* cyan */
- --coq-m: #b154cf; /* dark magenta */
- --coq-M: #da5bd6; /* magenta */
- --coq-k: #0f3b3a; /* dark black/grey */
- --coq-K: #155352; /* dark grey/black */
- --coq-y: #b1c9c3; /* grey */
- --coq-Y: #ffffff; /* white */
- --coq-true-black: #000000;
- /* Glow Effects */
- --glow-cyan: 0 0 3px var(--coq-C), 0 0 5px var(--coq-c), 0 0 7px var(--coq-b);
- --glow-magenta: 0 0 3px var(--coq-M), 0 0 5px var(--coq-m), 0 0 7px var(--coq-b);
- --glow-green: 0 0 3px var(--coq-G), 0 0 5px var(--coq-g), 0 0 7px var(--coq-k);
- --glow-yellow: 0 0 3px var(--coq-W), 0 0 5px var(--coq-O), 0 0 7px var(--coq-w);
- --glow-red: 0 0 3px var(--coq-R), 0 0 5px var(--coq-o), 0 0 7px var(--coq-r);
- }
- /* Basic Reset & Font */
- * {
- margin: 0;
- padding: 0;
- box-sizing: border-box;
- }
- body {
- font-family: "Consolas", "Monaco", "Courier New", monospace;
- line-height: 1.7;
- color: var(--coq-y); /* Default grey text */
- background-color: var(--coq-k); /* Dark background */
- background-image: linear-gradient(rgba(0, 20, 0, 0.08) 1px, transparent 1px); /* Subtle scanlines */
- background-size: 100% 3px;
- padding-bottom: 50px; /* Ensure space for footer */
- }
- /* Container */
- .container {
- max-width: 950px;
- margin: 30px auto;
- padding: 25px;
- background-color: rgba(0, 0, 0, 0.4); /* Slightly transparent black background */
- border: 1px solid var(--coq-c); /* Dark Cyan border */
- box-shadow: 0 0 15px rgba(119, 191, 207, 0.2); /* Cyan glow */
- border-radius: 3px;
- }
- /* Headings */
- h1, h2, h3 {
- font-weight: normal; /* Headings are bold by default */
- line-height: 1.4;
- margin-bottom: 15px;
- text-transform: uppercase; /* Terminal feel */
- }
- h1 {
- text-align: center;
- font-size: 2.1em;
- margin-bottom: 30px;
- border-bottom: 1px dashed var(--coq-C); /* Dashed Cyan underline */
- padding-bottom: 15px;
- color: var(--coq-Y); /* White */
- text-shadow: var(--glow-cyan);
- }
- h2 {
- font-size: 1.6em;
- margin-top: 35px;
- border-bottom: 1px solid var(--coq-g); /* Dark Green underline */
- padding-bottom: 8px;
- color: var(--coq-G); /* Bright Green */
- text-shadow: var(--glow-green);
- }
- h2::before {
- content: ">> "; /* Terminal prompt style */
- color: var(--coq-C);
- }
- h3 {
- font-size: 1.2em;
- color: var(--coq-M); /* Magenta */
- margin-top: 25px;
- text-shadow: var(--glow-magenta);
- }
- h3::before {
- content: " > ";
- color: var(--coq-m); /* Dark Magenta */
- }
- /* Paragraphs and Lists */
- p, li {
- margin-bottom: 14px;
- color: var(--coq-y); /* Grey text */
- }
- p::before {
- /* content: "│ "; */ /* Optional vertical line */
- /* color: var(--coq-K); */
- }
- ul, ol {
- margin-left: 30px;
- margin-bottom: 18px;
- list-style-type: none; /* Remove default bullets */
- }
- li::before {
- content: "* "; /* Use asterisk for list items */
- color: var(--coq-W); /* Gold/Yellow */
- margin-right: 8px;
- display: inline-block;
- width: 1em;
- margin-left: -1.2em; /* Align with text */
- }
- ol li::before {
- content: counter(list-item) ". "; /* Numbered lists */
- counter-increment: list-item;
- color: var(--coq-O); /* Orange */
- }
- /* Code Styling */
- code { /* Inline code */
- font-family: inherit; /* Keep terminal font */
- background-color: var(--coq-K); /* Dark Grey bg */
- padding: 2px 6px;
- border-radius: 2px;
- font-size: 0.95em;
- color: var(--coq-o); /* Dark Orange text */
- border: 1px dotted var(--coq-w); /* Brown dotted border */
- }
- pre { /* Code block */
- background-color: var(--coq-true-black); /* True Black bg */
- color: var(--coq-C); /* Cyan text */
- padding: 18px;
- border-radius: 0px; /* Sharp corners */
- border: 1px solid var(--coq-b); /* Dark Blue border */
- overflow-x: auto;
- margin: 15px 0 20px 0;
- font-size: 0.9em;
- line-height: 1.6;
- box-shadow: inset 0 0 8px rgba(0, 72, 189, 0.5); /* Inner blue glow */
- }
- pre code {
- background-color: transparent;
- color: inherit;
- padding: 0;
- font-size: inherit;
- border: none;
- }
- /* Links */
- a {
- color: var(--coq-B); /* Bright Blue */
- text-decoration: underline dashed;
- text-decoration-color: var(--coq-b);
- transition: all 0.2s ease-in-out;
- }
- a:hover {
- color: var(--coq-C); /* Cyan on hover */
- background-color: rgba(64, 164, 185, 0.1); /* Faint cyan bg */
- text-decoration: underline solid;
- text-shadow: var(--glow-cyan);
- }
- /* Tables */
- table {
- width: 100%;
- border-collapse: collapse;
- margin: 25px 0;
- font-size: 0.95em;
- border: 1px solid var(--coq-c); /* Dark Cyan border */
- box-shadow: 0 0 8px rgba(64, 164, 185, 0.15);
- }
- th, td {
- border: 1px dashed var(--coq-K); /* Dark Grey dashed lines */
- padding: 10px 14px;
- text-align: left;
- vertical-align: top; /* Align text top */
- }
- th {
- background-color: rgba(21, 83, 82, 0.5); /* Transparent Dark Black */
- color: var(--coq-C); /* Cyan header text */
- text-transform: uppercase;
- font-weight: normal;
- }
- tr:nth-child(even) td {
- background-color: rgba(15, 59, 58, 0.3); /* Very faint Dark Black bg for even rows */
- }
- tr:hover td {
- background-color: rgba(119, 191, 207, 0.1); /* Faint Cyan hover */
- color: var(--coq-Y);
- }
- td code { /* Make code in tables slightly different */
- color: var(--coq-R); /* Red code in tables */
- border-color: var(--coq-r);
- }
- /* Step Cards */
- .step-card {
- border: 1px solid var(--coq-m); /* Dark Magenta border */
- padding: 18px;
- margin-bottom: 20px;
- border-radius: 0; /* Sharp corners */
- background: linear-gradient(135deg, rgba(15, 59, 58, 0.3), rgba(21, 83, 82, 0.5)); /* Angled bg */
- box-shadow: inset 0 0 10px rgba(177, 84, 207, 0.2); /* Inner purple glow */
- }
- .step-card h3 {
- margin-top: 0;
- color: var(--coq-M); /* Keep Magenta */
- border-bottom: 1px dotted var(--coq-m);
- padding-bottom: 5px;
- }
- /* Footer */
- footer {
- text-align: center;
- margin-top: 40px;
- padding: 15px;
- border-top: 1px solid var(--coq-g); /* Dark Green top border */
- font-size: 0.9em;
- color: var(--coq-G); /* Green text */
- }
- footer::before, footer::after {
- content: " // ";
- color: var(--coq-g); /* Dark Green slashes */
- }
- /* Keyword Spans (will be generated by JS) */
- .keyword span {
- /* Add transition for potential future hover effects */
- /* transition: text-shadow 0.2s ease-in-out; */
- }
- /* Add a subtle glow to keywords */
- .keyword {
- text-shadow: 0 0 4px rgba(255, 255, 255, 0.1);
- }
- /* Responsive Design */
- @media (max-width: 768px) {
- body { font-size: 15px; }
- .container {
- margin: 15px;
- padding: 15px;
- border-width: 1px;
- }
- h1 { font-size: 1.8em; }
- h2 { font-size: 1.4em; }
- h3 { font-size: 1.1em; }
- table, th, td {
- font-size: 0.9em;
- padding: 8px;
- }
- pre { padding: 12px; font-size: 0.85em;}
- }
- </style>
- </head>
- <body>
- <div class="container">
- <h1>X-Lingual Vector Space Alignment // PROC LOG</h1>
- <section id="background">
- <h2>Initiate // Context Sync</h2>
- <p>
- <span class="keyword" data-template="hologram">Word embeddings</span> manifest as numerical vectors, encoding semantic resonance. Proximal vectors imply analogous meanings. Standard methodologies yield discrete vector manifolds per language substrate (e.g., English, Hindi).
- </p>
- <p>
- <span class="keyword" data-template="prismatic">Cross-lingual word embeddings</span> endeavor to reconcile these disparate spaces, projecting vectors into a unified continuum. Within this shared manifold, translation cognates (e.g., "<span class="keyword" data-template="rainbow">cat</span>" :: "<span class="keyword" data-template="qon">बिल्ली</span>") should exhibit high vector congruence. This paradigm unlocks potential for <span class="keyword" data-template="zetachrome">multilingual NLP</span> strata: machine translation, x-lingual retrieval, knowledge transference.
- </p>
- </section>
- <section id="objective">
- <h2>Directive // Project Goal</h2>
- <p>
- Primary objective: Implement and scrutinize a <span class="keyword" data-template="overloaded">supervised</span> methodology for aligning monolingual English & Hindi vector spaces. Employed technique: The <span class="keyword" data-template="psychalflesh">Procrustes</span> algorithm, learning a linear transformation matrix via a <span class="keyword" data-template="patchwork">bilingual lexicon</span>.
- </p>
- </section>
- <section id="methodology">
- <h2>Algorithm // Procrustes Alignment Protocol</h2>
- <p>
- The <span class="keyword" data-template="metamorphic">Procrustes</span> problem seeks an optimal <span class="keyword" data-template="crystalline">orthogonal linear transformation</span> matrix <code>W</code>, mapping source vectors (English, <code>X</code>) to maximally approximate target vectors (Hindi, <code>Y</code>), anchored by known translation pairings from the lexicon.
- </p>
- <p>
- Optimization target: Minimize the squared Frobenius norm divergence:
- </p>
- <pre><code class="language-math">argmin ||XW - Y||² s.t. WᵀW = I</code></pre>
- <p>
- Here, <code>X</code>, <code>Y</code> represent matrices of embeddings for anchor pairs. Constraint <code>WᵀW = I</code> enforces <span class="keyword" data-template="phase-harmonic">orthogonality</span>, preserving Euclidean distances and relative vector orientations – the internal geometry of the source space remains invariant post-transformation.
- </p>
- <p>
- Analytic solution achieved via <span class="keyword" data-template="mercurial">Singular Value Decomposition (SVD)</span>. Compute the SVD of <code>XᵀY</code> as <code>UΣVᵀ</code>. The optimal orthogonal map <code>W</code> emerges as:
- </p>
- <pre><code class="language-math">W = UVᵀ</code></pre>
- <p>
- Learned matrix <code>W</code>, derived from the training lexicon, is then broadcast across <em>all</em> source language vectors, projecting them into the shared alignment space: <code>X_aligned = X_source @ W</code>.
- </p>
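<p>
A minimal <span class="keyword" data-template="nanotech">NumPy</span> sketch of this closed-form step (names are illustrative; assumes <code>X</code>, <code>Y</code> are row-wise embedding matrices restricted to the training lexicon pairs):
</p>
<pre><code class="language-python">import numpy as np

def procrustes_map(X, Y):
    """Solve min_W ||X W - Y||_F^2 subject to W^T W = I (orthogonal Procrustes).

    X, Y: (n_pairs, dim) arrays of source/target embeddings,
    one translation pair per row.
    """
    # SVD of the cross-covariance X^T Y yields the optimal rotation W = U V^T.
    U, _, Vt = np.linalg.svd(X.T @ Y)
    return U @ Vt

# Illustrative usage:
# W = procrustes_map(X_train, Y_train)
# X_aligned = X_source @ W
</code></pre>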
- </section>
- <section id="implementation">
- <h2>Execution Log // Pipeline Steps</h2>
- <p>Systematic procedure enacted via <span class="keyword" data-template="agolgot">Python</span>:</p>
- <div class="step-card">
- <h3>1. Data Ingestion & Prep</h3>
- <ul>
- <li>Acquired initial 10k article fragments per language substrate (EN:<code>20220301.en</code>, HI:<code>20220301.hi</code>) from <span class="keyword" data-template="starry">Wikipedia</span> archives via Hugging Face <code>datasets</code> interface.</li>
- <li>Applied text normalization routines (case folding, non-alphanumeric purge) -> temporary data streams.</li>
- <li>Retrieved EN-HI bilingual lexicon (<a href="https://github.com/facebookresearch/MUSE#ground-truth-bilingual-dictionaries" target="_blank" rel="noopener noreferrer"><span class="keyword" data-template="bethsaida">MUSE</span></a> artifact), bifurcated into train/test partitions.</li>
- </ul>
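<p>
A hedged sketch of this ingestion step (dataset IDs as above; file paths and the exact MUSE dictionary URL are illustrative and should be checked against the MUSE README):
</p>
<pre><code class="language-python">import re
import requests
from datasets import load_dataset

# First 10k articles per language from the 20220301 Wikipedia dumps.
wiki_en = load_dataset("wikipedia", "20220301.en", split="train[:10000]")
wiki_hi = load_dataset("wikipedia", "20220301.hi", split="train[:10000]")

def normalize(text):
    # Case folding + purge of characters outside letters/digits/whitespace.
    return re.sub(r"[^\w\s]", " ", text.lower())

for dataset, path in ((wiki_en, "wiki_en_clean.txt"), (wiki_hi, "wiki_hi_clean.txt")):
    with open(path, "w", encoding="utf-8") as f:
        for article in dataset:
            f.write(normalize(article["text"]) + "\n")

# MUSE ground-truth EN-HI train dictionary (URL pattern per the MUSE repository).
url = "https://dl.fbaipublicfiles.com/arrival/dictionaries/en-hi.0-5000.txt"
with open("en-hi.train.txt", "wb") as f:
    f.write(requests.get(url, timeout=60).content)
</code></pre>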
- </div>
- <div class="step-card">
- <h3>2. Monolingual Vector Synthesis</h3>
- <ul>
- <li>Synthesized 300-D <span class="keyword" data-template="plasma">FastText</span> (skipgram config) models independently for EN & HI streams.</li>
- <li>Parameters: <code>minCount</code>=5, <code>epochs</code>=5.</li>
- <li>Extracted vector representations for top 100k lexical items per language manifold based on frequency heuristics.</li>
- </ul>
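<p>
A sketch using the <code>fasttext</code> Python bindings (file names carried over from the ingestion sketch; the frequency-ordered vocabulary truncation via <code>model.words</code> is an assumption):
</p>
<pre><code class="language-python">import fasttext

# 300-D skipgram vectors per language, minCount=5, epoch=5 (as listed above).
model_en = fasttext.train_unsupervised(
    "wiki_en_clean.txt", model="skipgram", dim=300, minCount=5, epoch=5
)
model_hi = fasttext.train_unsupervised(
    "wiki_hi_clean.txt", model="skipgram", dim=300, minCount=5, epoch=5
)

def top_vectors(model, k=100_000):
    # model.words lists the dictionary ordered by corpus frequency.
    words = model.words[:k]
    return words, {w: model.get_word_vector(w) for w in words}

vocab_en, vecs_en = top_vectors(model_en)
vocab_hi, vecs_hi = top_vectors(model_hi)
</code></pre>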
- </div>
- <div class="step-card">
- <h3>3. Vector Space Conditioning</h3>
- <ul>
- <li>Applied standard pre-alignment vector conditioning:
- <ol>
- <li><span class="keyword" data-template="ironshank">Mean Centering:</span> Nullified mean vector bias within each set.</li>
- <li><span class="keyword" data-template="refractive">Unit Length Normalization:</span> Scaled all vectors to L2 norm = 1.</li>
- </ol>
- Critical for Procrustes stability and cosine similarity validity.
- </li>
- </ul>
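<p>
A minimal NumPy sketch of the conditioning (array names are illustrative; vectors are stacked row-wise in vocabulary order, reusing <code>vocab_en</code>/<code>vecs_en</code> from the previous sketch):
</p>
<pre><code class="language-python">import numpy as np

def condition(embeddings):
    """Mean-center, then scale every vector to unit L2 norm."""
    centered = embeddings - embeddings.mean(axis=0, keepdims=True)
    norms = np.linalg.norm(centered, axis=1, keepdims=True)
    return centered / np.maximum(norms, 1e-12)  # guard against zero vectors

emb_en = np.stack([vecs_en[w] for w in vocab_en])   # shape (100000, 300)
emb_hi = np.stack([vecs_hi[w] for w in vocab_hi])
emb_en_norm = condition(emb_en)
emb_hi_norm = condition(emb_hi)
</code></pre>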
- </div>
- <div class="step-card">
- <h3>4. Procrustes Transformation Learning</h3>
- <ul>
- <li>Loaded MUSE training lexicon, filtering pairs absent from the top-100k vocabulary strata.</li>
- <li>Constructed parallel matrices <code>X_train</code>, <code>Y_train</code> from normalized vectors corresponding to filtered lexicon entries.</li>
- <li>Computed optimal orthogonal transformation <code>W</code> via <span class="keyword" data-template="ubernostrum">SVD</span> solution (<code>W = UVᵀ</code> where <code>UΣVᵀ = SVD(X_trainᵀ @ Y_train)</code>).</li>
- <li>Projected the entire normalized English vector space using the learned map: <code>embeddings_src_aligned = embeddings_src_norm @ W</code>.</li>
- </ul>
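<p>
Continuing the earlier sketches (the word-to-row index maps and the dictionary file name below are illustrative helpers, not artifacts from the original run):
</p>
<pre><code class="language-python">word2idx_en = {w: i for i, w in enumerate(vocab_en)}
word2idx_hi = {w: i for i, w in enumerate(vocab_hi)}

# Keep only lexicon pairs whose both sides fall inside the top-100k vocabularies.
pairs = []
with open("en-hi.train.txt", encoding="utf-8") as f:
    for line in f:
        parts = line.split()
        if len(parts) != 2:
            continue
        src, tgt = parts
        if src in word2idx_en and tgt in word2idx_hi:
            pairs.append((word2idx_en[src], word2idx_hi[tgt]))

X_train = emb_en_norm[[i for i, _ in pairs]]
Y_train = emb_hi_norm[[j for _, j in pairs]]

W = procrustes_map(X_train, Y_train)   # SVD solution from the methodology sketch
emb_en_aligned = emb_en_norm @ W       # project the whole normalized EN space
</code></pre>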
- </div>
- <div class="step-card">
- <h3>5. Alignment Quality Assessment</h3>
- <ul>
- <li><strong><span class="keyword" data-template="fiery">Word Translation Task</span>:</strong>
- <ul>
- <li>Utilized filtered MUSE test lexicon.</li>
- <li>For each EN source term, retrieved its aligned vector representation.</li>
- <li>Computed <span class="keyword" data-template="watery">cosine similarity</span> against <em>all</em> normalized HI target vectors.</li>
- <li>Identified Top-1/Top-5 HI nearest neighbors based on similarity maxima.</li>
- <li>Calculated <span class="keyword" data-template="sunslag">Precision@1 (P@1)</span> & <span class="keyword" data-template="sunslag">Precision@5 (P@5)</span>: the % of test terms whose ground-truth translation appears among the top-k retrieved neighbors, for k = 1 and k = 5.</li>
- </ul>
- </li>
- <li><strong><span class="keyword" data-template="chiral">Semantic Congruence Probe</span>:</strong>
- <ul>
- <li>Calculated cosine similarities for curated pairs (translations, related concepts, unrelated concepts) between aligned EN vectors and original HI vectors for qualitative validation.</li>
- </ul>
- </li>
- <li><strong><span class="keyword" data-template="metachrome">Ablation Analysis (Dictionary Size)</span>:</strong>
- <ul>
- <li>Re-executed alignment pipeline (Steps 4 & 5) with incrementally smaller subsets of the training lexicon (e.g., 500, 1k, 2k, 5k pairs).</li>
- <li>Tracked P@1/P@5 sensitivity to the volume of supervisory signal (lexicon size).</li>
- </ul>
- </li>
- </ul>
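<p>
A sketch of the P@1 / P@5 computation (test-pair loading mirrors the training-lexicon filtering; the sketch assumes one gold translation per test pair):
</p>
<pre><code class="language-python">import numpy as np

def precision_at_k(test_pairs, src_aligned, tgt_norm, ks=(1, 5)):
    """test_pairs: list of (src_row, tgt_row) index pairs from the test lexicon."""
    hits = {k: 0 for k in ks}
    for src_idx, tgt_idx in test_pairs:
        # On unit-norm vectors, cosine similarity reduces to a dot product.
        sims = tgt_norm @ src_aligned[src_idx]
        top = np.argsort(-sims)[:max(ks)]
        for k in ks:
            if tgt_idx in top[:k]:
                hits[k] += 1
    n = len(test_pairs)
    return {k: 100.0 * hits[k] / n for k in ks}

# scores = precision_at_k(test_pairs, emb_en_aligned, emb_hi_norm)
</code></pre>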
- </div>
- </section>
- <section id="results">
- <h2>Output // Assessment Metrics</h2>
- <h3>Word Translation Accuracy (Full Training Lexicon)</h3>
- <p>Alignment efficacy derived from complete filtered MUSE training data, evaluated on test partition:</p>
- <!-- NOTE: Replace these placeholder values with your actual results -->
- <table>
- <thead>
- <tr>
- <th>Metric</th>
- <th>Score (%)</th>
- </tr>
- </thead>
- <tbody>
- <tr>
- <td>Precision@1</td>
- <td><span class="keyword" data-template="rocket">XX.XX</span> %</td> <!-- Placeholder -->
- </tr>
- <tr>
- <td>Precision@5</td>
- <td><span class="keyword" data-template="rocket">YY.YY</span> %</td> <!-- Placeholder -->
- </tr>
- </tbody>
- </table>
- <p><em>(System Note: Values contingent on vector synthesis quality & vocab/lexicon overlap. Placeholders active.)</em></p>
- <h3>Cross-Lingual Semantic Similarity Samples</h3>
- <p>Cosine similarity readouts for selected term pairs (aligned EN vectors and/or original HI vectors, as indicated by the relation column):</p>
- <!-- NOTE: Replace these placeholder values with your actual results -->
- <table>
- <thead>
- <tr>
- <th>Term A</th>
- <th>Term B</th>
- <th>Cosine Sim.</th>
- <th>Expected Relation</th>
- </tr>
- </thead>
- <tbody>
- <tr>
- <td><code>king</code></td>
- <td><code>राजा</code></td>
- <td><span class="keyword" data-template="lovesickness">0.XXX</span></td> <!-- Placeholder -->
- <td>Translation</td>
- </tr>
- <tr>
- <td><code>queen</code></td>
- <td><code>रानी</code></td>
- <td><span class="keyword" data-template="lovesickness">0.XXX</span></td> <!-- Placeholder -->
- <td>Translation</td>
- </tr>
- <tr>
- <td><code>water</code></td>
- <td><code>पानी</code></td>
- <td><span class="keyword" data-template="watery">0.XXX</span></td> <!-- Placeholder -->
- <td>Translation</td>
- </tr>
- <tr>
- <td><code>king</code></td>
- <td><code>queen</code></td>
- <td><span class="keyword" data-template="amorous">0.XXX</span></td> <!-- Placeholder -->
- <td>Related (EN)</td>
- </tr>
- <tr>
- <td><code>राजा</code></td>
- <td><code>रानी</code></td>
- <td><span class="keyword" data-template="amorous">0.XXX</span></td> <!-- Placeholder -->
- <td>Related (HI via aligned EN)</td>
- </tr>
- <tr>
- <td><code>king</code></td>
- <td><code>पानी</code></td>
- <td><span class="keyword" data-template="glotrot">0.XXX</span></td> <!-- Placeholder -->
- <td>Unrelated</td>
- </tr>
- </tbody>
- </table>
- <p>Elevated similarity scores for cognates/related concepts signal successful spatial reconciliation.</p>
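<p>
The readouts above correspond to comparisons of this shape (sketch; reuses the aligned/normalized matrices and index maps from the pipeline sketches):
</p>
<pre><code class="language-python">def probe(word_a, word_b, space_a, space_b, idx_a, idx_b):
    """Cosine similarity between two unit-norm vectors from the chosen spaces."""
    return float(space_a[idx_a[word_a]] @ space_b[idx_b[word_b]])

# Translation pair: aligned EN "king" vs. original HI "राजा"
# probe("king", "राजा", emb_en_aligned, emb_hi_norm, word2idx_en, word2idx_hi)
</code></pre>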
- </section>
- <section id="ablation">
- <h2>Sensitivity Analysis // Lexicon Size Impact</h2>
- <p>
- Investigating alignment fidelity dependence on supervisory signal volume (training pair count). Measured via word translation accuracy on static test set.
- </p>
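<p>
A sketch of the re-run loop (reuses <code>pairs</code>, <code>procrustes_map</code>, and <code>precision_at_k</code> from the pipeline sketches; subset sizes as listed below):
</p>
<pre><code class="language-python"># Re-fit the mapping on progressively larger slices of the training lexicon.
for size in (500, 1000, 2000, len(pairs)):
    subset = pairs[:size]
    X_sub = emb_en_norm[[i for i, _ in subset]]
    Y_sub = emb_hi_norm[[j for _, j in subset]]
    W_sub = procrustes_map(X_sub, Y_sub)
    scores = precision_at_k(test_pairs, emb_en_norm @ W_sub, emb_hi_norm)
    print(size, scores)
</code></pre>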
- <!-- NOTE: Replace these placeholder values with your actual results -->
- <table>
- <thead>
- <tr>
- <th>Training Pairs</th>
- <th>Precision@1 (%)</th>
- <th>Precision@5 (%)</th>
- </tr>
- </thead>
- <tbody>
- <tr>
- <td>500</td>
- <td><span class="keyword" data-template="nervous">AA.AA</span> %</td> <!-- Placeholder -->
- <td><span class="keyword" data-template="nervous">BB.BB</span> %</td> <!-- Placeholder -->
- </tr>
- <tr>
- <td>1000</td>
- <td><span class="keyword" data-template="gaslight">CC.CC</span> %</td> <!-- Placeholder -->
- <td><span class="keyword" data-template="gaslight">DD.DD</span> %</td> <!-- Placeholder -->
- </tr>
- <tr>
- <td>2000</td>
- <td><span class="keyword" data-template="brainbrine">EE.EE</span> %</td> <!-- Placeholder -->
- <td><span class="keyword" data-template="brainbrine">FF.FF</span> %</td> <!-- Placeholder -->
- </tr>
- <tr>
- <td>5000 (Full)</td>
- <td><span class="keyword" data-template="overloaded">XX.XX</span> %</td> <!-- Placeholder -->
- <td><span class="keyword" data-template="overloaded">YY.YY</span> %</td> <!-- Placeholder -->
- </tr>
- </tbody>
- </table>
- <p>
- Observation: Augmenting training pairs generally enhances alignment quality, yielding superior translation metrics, though returns may diminish asymptotically.
- </p>
- </section>
- <section id="tools">
- <h2>Toolkit // Libraries & Artifacts</h2>
- <ul>
- <li><strong><span class="keyword" data-template="pythonic">Python 3</span>:</strong> Core execution environment.</li>
- <li><strong><span class="keyword" data-template="plasma">FastText</span>:</strong> Monolingual vector synthesis engine.</li>
- <li><strong>Hugging Face <code>datasets</code>:</strong> Corpus acquisition interface (<span class="keyword" data-template="starry">Wikipedia</span>).</li>
- <li><strong><span class="keyword" data-template="nanotech">NumPy</span>:</strong> N-dimensional array manipulation & linear algebra substrate.</li>
- <li><strong><span class="keyword" data-template="qon">SciPy</span>:</strong> <span class="keyword" data-template="mercurial">SVD</span> computation module (<code>linalg.svd</code>).</li>
- <li><strong><span class="keyword" data-template="psionic">Scikit-learn</span>:</strong> Cosine similarity utility (alternative implementation).</li>
- <li><strong><span class="keyword" data-template="webclient">Requests</span>:</strong> HTTP retrieval agent (MUSE lexicon).</li>
- <li><strong><span class="keyword" data-template="bethsaida">MUSE Bilingual Dictionaries</span>:</strong> Ground-truth translation pairs (supervisory signal).</li>
- </ul>
- </section>
- <section id="conclusion">
- <h2>Termination // Synopsis</h2>
- <p>
- Project successfully deployed the <span class="keyword" data-template="psychalflesh">Procrustes</span> supervised alignment protocol onto EN/HI <span class="keyword" data-template="plasma">FastText</span> vectors derived from <span class="keyword" data-template="starry">Wikipedia</span> data. Learning an <span class="keyword" data-template="crystalline">orthogonal transformation</span> from a bilingual lexicon yielded a unified vector manifold exhibiting proximity between translation cognates. Efficacy validated via word translation metrics (P@1, P@5) and semantic similarity probes. <span class="keyword" data-template="metachrome">Ablation analysis</span> confirmed positive correlation between supervisory lexicon scale and alignment robustness. System achieved specified objectives.
- </p>
- </section>
- <footer>
- Alignment Protocol Log // End Transmission
- </footer>
- </div>
- <script>
- document.addEventListener('DOMContentLoaded', () => {
- // --- CoQ Color Definitions ---
- const colors = {
- 'r': '#a64a2e', 'R': '#d74200', 'o': '#f15f22', 'O': '#e99f10',
- 'w': '#98875f', 'W': '#cfc041', 'g': '#009403', 'G': '#00c420',
- 'b': '#0048bd', 'B': '#0096ff', 'c': '#40a4b9', 'C': '#77bfcf',
- 'm': '#b154cf', 'M': '#da5bd6', 'k': '#0f3b3a', 'K': '#155352',
- 'y': '#b1c9c3', 'Y': '#ffffff'
- };
- // --- CoQ Template Definitions (Subset) ---
- const templates = {
- 'hologram': { colors: 'b-B-C-c', type: 'sequence' },
- 'ydfreehold': { colors: 'r-R-k-c-C-W-W-C-c-r-R', type: 'sequence' },
- 'purple': { colors: 'm', type: 'sequence' }, // Effectively solid
- 'paisley': { colors: 'm-M-Y-M-m', type: 'sequence' },
- 'biomech': { colors: 'w-w-r-r-r-w-r-r', type: 'sequence' },
- 'rainbow': { colors: 'r-R-W-G-B-b-m', type: 'alternation' }, // Use alternation for more variance
- 'important': { colors: 'W', type: 'sequence' }, // Solid Gold/Yellow
- 'metamorphic': { colors: 'y-y-y-Y-Y-Y-M-M-M-m-m-m-m', type: 'sequence' },
- 'ubernostrum': { colors: 'c-g-G-W-w-c-C-G-g-w-W', type: 'sequence' },
- 'rocket': { colors: 'Y-W-R-R-r-y', type: 'alternation' },
- 'visage': { colors: 'R-r-b-B-Y-y', type: 'sequence' },
- 'dreamsmoke': { colors: 'b-b-b-b-y-Y-Y-W-w-b-b-b', type: 'sequence' },
- 'polarized': { colors: 'K-y-Y-y-K-y-Y-y-K', type: 'alternation' },
- 'ironshank': { colors: 'K-y-Y-y', type: 'sequence' },
- 'dark fiery': { colors: 'r-R-W-R-r', type: 'alternation' },
- 'bethsaida': { colors: 'w-W-C-c-m-c-C-W-w', type: 'sequence' },
- 'plasma': { colors: 'g-G-Y-Y-G-g', type: 'sequence' },
- 'prismatic': { colors: 'r-R-W-G-B-b-m', type: 'sequence' },
- 'lovesickness': { colors: 'r-R-M-m-r-R-M', type: 'sequence' },
- 'fiery': { colors: 'R', type: 'sequence' }, // Solid Red
- 'qon': { colors: 'm-b-B', type: 'sequence' },
- 'agolgot': { colors: 'K-g-w-m-w-g-K', type: 'sequence' },
- 'zetachrome': { colors: 'm-M-Y-C-c-c-C-Y-M-m', type: 'alternation' },
- 'watery': { colors: 'B-C-Y-C-B', type: 'alternation' },
- 'psychalflesh': { colors: 'w-w-w-r-R-M-M-m-M-M-R-r-w-w-w-w', type: 'sequence' },
- 'starry': { colors: 'K-Y-K-K-Y-K', type: 'sequence' },
- 'blaze': { colors: 'r-r-R-W-Y', type: 'sequence' },
- 'amorous': { colors: 'r-R-M-m', type: 'alternation' },
- 'mercurial': { colors: 'c-c-C-W-Y-W-C-c-c', type: 'alternation' },
- 'shade': { colors: 'y-K-c-b-B-y-C-y-K', type: 'sequence' },
- 'crystalline': { colors: 'm-m-m-b-B-Y-B-b-m-m-m', type: 'sequence' },
- 'phase-harmonic': { colors: 'Y-y-m-y-K', type: 'sequence' },
- 'refractive': { colors: 'y-Y', type: 'sequence' },
- 'chiral': { colors: 'B-b-c-C-M-m-k-m-M-C-c-b', type: 'sequence' },
- 'patchwork': { colors: 'W-w-r-R-W-w-b-B-W', type: 'sequence' },
- 'overloaded': { colors: 'y-y-w-W-R-W-w-y-y', type: 'alternation' },
- 'sunslag': { colors: 'r-W-Y-Y-Y-W-r', type: 'sequence' },
- 'metachrome': { colors: 'w-W-Y-C-c-c-C-Y-W-w', type: 'alternation' },
- 'nanotech': { colors: 'K-K-y-K', type: 'sequence' },
- 'pythonic': { colors: 'B-b-W-y-Y-W', type: 'sequence'}, // For Python
- 'webclient': { colors: 'C-c-B-b-Y', type: 'alternation'}, // For Requests
- 'psionic': { colors: 'b-B-C-c-b-B-C', type: 'alternation'}, // Scikit-learn
- 'nervous': { colors: 'g-g-w-W-w-g-g', type: 'sequence'},
- 'gaslight': { colors: 'g-g-w-W-w-g-g', type: 'alternation'},
- 'brainbrine': { colors: 'g-g-g-w-W-W-W-w-g-g-g', type: 'sequence'},
- 'glotrot': { colors: 'K-K-r-R-r', type: 'sequence'},
- // Add more templates if needed
- };
- function applyColorTemplate(element, templateName) {
- const template = templates[templateName] || templates['rainbow']; // Default to rainbow
- if (!template) return; // Safety check
- const colorCodes = template.colors.split('-').map(code => colors[code] || colors['y']); // Get hex codes, default to grey
- if (colorCodes.length === 0) return;
- const text = element.innerText;
- const letters = text.split('');
- let html = '';
- let colorIndex = 0;
- letters.forEach((letter, i) => {
- if (letter.trim() === '') { // Keep spaces uncolored
- html += letter;
- } else {
- if (template.type === 'sequence') {
- colorIndex = i % colorCodes.length;
- } else if (template.type === 'alternation') {
- // Simple alternation cycle - can be made more complex
- colorIndex = i % colorCodes.length;
- } else { // Default to sequence if type is unknown
- colorIndex = i % colorCodes.length;
- }
- const color = colorCodes[colorIndex];
- html += `<span style="color: ${color};">${letter}</span>`;
- }
- });
- element.innerHTML = html;
- }
- // --- Apply Templates ---
- const keywords = document.querySelectorAll('.keyword');
- keywords.forEach(el => {
- const templateName = el.dataset.template;
- if (templateName) {
- applyColorTemplate(el, templateName);
- } else {
- applyColorTemplate(el, 'rainbow'); // Apply default if no template specified
- }
- });
- // Add some dynamic effect (optional - e.g., slight glow pulse on h1)
- const mainTitle = document.querySelector('h1');
- if (mainTitle) {
- // Example: simple pulse (can be much more elaborate)
- // mainTitle.style.animation = 'pulseGlow 3s infinite alternate';
- }
- // Define pulse animation if used:
- // @keyframes pulseGlow { from { text-shadow: var(--glow-cyan); } to { text-shadow: 0 0 5px var(--coq-C), 0 0 8px var(--coq-c), 0 0 12px var(--coq-b), 0 0 15px var(--coq-B); } }
- });
- </script>
- </body>
- </html>