Compare commits

...

11 Commits

Author SHA1 Message Date
N/A
42864c5491 Should fully work now 2026-01-09 00:47:49 +01:00
N/A
3131fd3531 Hopefully? 2026-01-09 00:45:01 +01:00
N/A
d936ac872a Maybe now? 2026-01-09 00:40:25 +01:00
N/A
019f85a29a HTTPS fix 2026-01-09 00:35:21 +01:00
N/A
87c56ba2f8 Better suggestion accept on mobile and word wrap offset fix 2026-01-09 00:24:38 +01:00
5bd7ccf76a Update index.html 2026-01-07 11:17:49 +01:00
630d8ebdc0 Update api.html 2026-01-07 11:02:22 +01:00
6f0977be50 Mobile UI fix 2026-01-07 10:57:57 +01:00
86c20628c8 API page scrolling fix 2026-01-07 10:42:17 +01:00
56b4e056c3 API page 2026-01-07 10:40:22 +01:00
b5d5195f8f Layout fix 2026-01-07 10:39:32 +01:00
7 changed files with 662 additions and 246 deletions

View File

@@ -5,4 +5,4 @@ COPY requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt
COPY . .
EXPOSE 8000
CMD ["uvicorn", "api:app", "--host", "0.0.0.0", "--port", "8000"]
CMD ["uvicorn", "api:app", "--host", "0.0.0.0", "--port", "8000", "--proxy-headers", "--forwarded-allow-ips", "*"]

View File

@@ -1,3 +0,0 @@
TODO:
- Fix the suggestion text not scrolling
- Add a /api page

48
api.py
View File

@@ -1,13 +1,15 @@
import os
import uvicorn
from fastapi import FastAPI, Body
from fastapi.staticfiles import StaticFiles
from fastapi.responses import FileResponse
from pydantic import BaseModel
import sys
import uvicorn
from fastapi import Body, FastAPI
from fastapi.middleware.cors import CORSMiddleware
from fastapi.responses import FileResponse
from fastapi.staticfiles import StaticFiles
from pydantic import BaseModel
# Import core LLM logic
from llm import load_or_train_model, generate_text, SOURCES_DIR
from llm import SOURCES_DIR, generate_text, load_or_train_model
# --- Configuration ---
# Models to pre-load on startup
@@ -18,19 +20,31 @@ UI_DIR = "ui"
# Cache for loaded models: {n: model}
MODEL_CACHE = {}
# --- Pydantic Models ---
class PredictRequest(BaseModel):
prompt: str
temperature: float = 0.7
n: int = 3
temperature: float = 1.6
n: int = 4
length: int = 5
class PredictResponse(BaseModel):
prediction: str
# --- FastAPI App ---
app = FastAPI()
app.add_middleware(
CORSMiddleware,
allow_origins=["*"],
allow_credentials=True,
allow_methods=["*"],
allow_headers=["*"],
)
def get_model_for_n(n: int):
"""
Retrieves the model for a specific N from cache, or loads/trains it.
@@ -44,6 +58,7 @@ def get_model_for_n(n: int):
MODEL_CACHE[n] = model
return model
@app.on_event("startup")
def startup_event():
"""
@@ -54,6 +69,7 @@ def startup_event():
get_model_for_n(n)
print(f"Models for N={PRELOAD_N_GRAMS} loaded. Server is ready.")
@app.post("/api/predict", response_model=PredictResponse)
async def predict(request: PredictRequest):
"""
@@ -71,22 +87,34 @@ async def predict(request: PredictRequest):
model,
start_prompt=request.prompt,
length=length,
temperature=request.temperature
temperature=request.temperature,
)
return PredictResponse(prediction=prediction)
@app.get("/api")
async def api_docs():
"""
API documentation page.
"""
return FileResponse(os.path.join(UI_DIR, "api.html"))
# --- Static Files and Root ---
app.mount("/ui", StaticFiles(directory=UI_DIR), name="ui")
@app.get("/")
async def read_root():
return FileResponse(os.path.join(UI_DIR, "index.html"))
def run():
# Read port from environment variable, default to 8000
port = int(os.environ.get("PORT", 8000))
uvicorn.run(app, host="0.0.0.0", port=port)
uvicorn.run(app, host="0.0.0.0", port=port, proxy_headers=True, forwarded_allow_ips="*")
if __name__ == "__main__":
run()

390
ui/api.html Normal file
View File

@@ -0,0 +1,390 @@
<!doctype html>
<html lang="en" class="dark">
<head>
<meta charset="UTF-8" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<title>Kreatyw - API Documentation</title>
<link rel="stylesheet" href="/ui/style.css?v=2" />
<link rel="icon" type="image/x-icon" href="/ui/favicon.ico" />
<style>
body {
overflow: auto !important;
height: auto !important;
}
.doc-container {
max-width: 900px;
margin: 0 auto;
padding: 3rem 2rem;
}
.doc-header {
margin-bottom: 3rem;
border-bottom: 1px solid var(--border);
padding-bottom: 2rem;
}
.doc-header h1 {
font-size: 2.5rem;
margin: 0 0 0.5rem 0;
font-weight: 700;
}
.doc-header p {
color: var(--muted-foreground);
font-size: 1.125rem;
margin: 0;
}
.section {
margin-bottom: 3rem;
}
.section h2 {
font-size: 1.75rem;
margin: 0 0 1rem 0;
font-weight: 600;
}
.section h3 {
font-size: 1.25rem;
margin: 2rem 0 1rem 0;
font-weight: 600;
color: var(--muted-foreground);
}
.section p {
line-height: 1.7;
color: var(--foreground);
margin: 0 0 1rem 0;
}
.code-block {
background-color: var(--secondary);
border: 1px solid var(--border);
border-radius: var(--radius);
padding: 1.5rem;
overflow-x: auto;
margin: 1rem 0;
}
.code-block pre {
margin: 0;
font-family: "SF Mono", "Fira Code", monospace;
font-size: 0.875rem;
line-height: 1.6;
color: var(--foreground);
}
.inline-code {
background-color: var(--secondary);
padding: 0.2rem 0.4rem;
border-radius: 4px;
font-family: "SF Mono", "Fira Code", monospace;
font-size: 0.875em;
color: var(--foreground);
}
.param-table {
width: 100%;
border-collapse: collapse;
margin: 1rem 0;
}
.param-table th,
.param-table td {
text-align: left;
padding: 0.75rem;
border-bottom: 1px solid var(--border);
}
.param-table th {
font-weight: 600;
color: var(--muted-foreground);
font-size: 0.875rem;
text-transform: uppercase;
letter-spacing: 0.05em;
}
.param-table td {
color: var(--foreground);
}
.param-table tr:last-child td {
border-bottom: none;
}
.badge {
display: inline-block;
padding: 0.25rem 0.5rem;
border-radius: 4px;
font-size: 0.75rem;
font-weight: 600;
text-transform: uppercase;
}
.badge-post {
background-color: #10b981;
color: white;
}
.badge-required {
background-color: #ef4444;
color: white;
}
.badge-optional {
background-color: var(--secondary);
color: var(--muted-foreground);
}
.back-link {
display: inline-flex;
align-items: center;
gap: 0.5rem;
color: var(--foreground);
text-decoration: none;
margin-bottom: 2rem;
font-weight: 500;
transition: opacity 0.2s;
}
.back-link:hover {
opacity: 0.8;
}
</style>
</head>
<body>
<div class="doc-container">
<a href="/" class="back-link">
<svg
xmlns="http://www.w3.org/2000/svg"
width="20"
height="20"
viewBox="0 0 24 24"
fill="none"
stroke="currentColor"
stroke-width="2"
stroke-linecap="round"
stroke-linejoin="round"
>
<line x1="19" y1="12" x2="5" y2="12"></line>
<polyline points="12 19 5 12 12 5"></polyline>
</svg>
Back to Editor
</a>
<div class="doc-header">
<h1>Kreatyw API</h1>
<p>Text generation API powered by N-gram language models</p>
</div>
<div class="section">
<h2>Overview</h2>
<p>
The Kreatyw API provides a simple REST endpoint for
generating text continuations using N-gram language models.
The API uses Markov chains trained on source texts to
predict and generate coherent text sequences.
</p>
</div>
<div class="section">
<h2>Base URL</h2>
<div class="code-block">
<pre>https://kreatyw.krzak.org</pre>
</div>
</div>
<div class="section">
<h2>Endpoints</h2>
<h3>POST /api/predict</h3>
<p>Generate text continuation based on a given prompt.</p>
<h3>Request Body</h3>
<table class="param-table">
<thead>
<tr>
<th>Parameter</th>
<th>Type</th>
<th>Required</th>
<th>Description</th>
</tr>
</thead>
<tbody>
<tr>
<td><span class="inline-code">prompt</span></td>
<td>string</td>
<td>
<span class="badge badge-required"
>Required</span
>
</td>
<td>The starting text to continue from</td>
</tr>
<tr>
<td><span class="inline-code">n</span></td>
<td>integer</td>
<td>
<span class="badge badge-optional"
>Optional</span
>
</td>
<td>
N-gram size (2-5). Default: 4. Higher values
produce more coherent but less creative text.
</td>
</tr>
<tr>
<td>
<span class="inline-code">temperature</span>
</td>
<td>float</td>
<td>
<span class="badge badge-optional"
>Optional</span
>
</td>
<td>
Sampling temperature (0.1-2.0). Default: 1.6.
Higher values increase randomness.
</td>
</tr>
<tr>
<td><span class="inline-code">length</span></td>
<td>integer</td>
<td>
<span class="badge badge-optional"
>Optional</span
>
</td>
<td>
Number of words to generate (1-500). Default: 5.
</td>
</tr>
</tbody>
</table>
<h3>Response</h3>
<table class="param-table">
<thead>
<tr>
<th>Field</th>
<th>Type</th>
<th>Description</th>
</tr>
</thead>
<tbody>
<tr>
<td><span class="inline-code">prediction</span></td>
<td>string</td>
<td>The generated text continuation</td>
</tr>
</tbody>
</table>
<h3>Example Request</h3>
<div class="code-block">
<pre>
curl -X POST https://kreatyw.krzak.org/api/predict \
-H "Content-Type: application/json" \
-d '{
"prompt": "Kiedyś tak było",
"n": 4,
"temperature": 1.2,
"length": 20
}'</pre
>
</div>
<h3>Example Response</h3>
<div class="code-block">
<pre>
{
"prediction": "przezroczyste, że prawie ich dostrzec nie mógł. Słysząc bowiem tyle o jej egzystencji. Zaiste z pogardą arcykapłańskich święceń i źle traktujesz sługi boże."
}</pre
>
</div>
<h3>JavaScript Example</h3>
<div class="code-block">
<pre>
const response = await fetch('/api/predict', {
method: 'POST',
headers: {
'Content-Type': 'application/json'
},
body: JSON.stringify({
prompt: 'Kiedyś tak było',
n: 3,
temperature: 0.8,
length: 15
})
});
const data = await response.json();
console.log(data.prediction);</pre
>
</div>
<h3>Python Example</h3>
<div class="code-block">
<pre>
import requests
response = requests.post('https://kreatyw.krzak.org/api/predict',
json={
'prompt': 'Kiedyś tak było',
'n': 4,
'temperature': 1.0,
'length': 25
}
)
result = response.json()
print(result['prediction'])</pre
>
</div>
</div>
<div class="section">
<h2>Model Parameters</h2>
<h3>N-gram Size (n)</h3>
<p>
Controls the context window size. Higher values use more
context words to predict the next word:
</p>
<ul style="line-height: 1.8; color: var(--foreground)">
<li>
<strong>n=2 (Bigram):</strong> Uses 1 previous word for
context. Very creative but less coherent.
</li>
<li>
<strong>n=3 (Trigram):</strong> Uses 2 previous words.
Balanced creativity and coherence.
</li>
<li>
<strong>n=4 (Tetragram):</strong> Uses 3 previous words.
More coherent, less random.
</li>
<li>
<strong>n=5 (Pentagram):</strong> Uses 4 previous words.
Most coherent, closest to training data.
</li>
</ul>
<h3>Temperature</h3>
<p>Controls the randomness of predictions:</p>
<ul style="line-height: 1.8; color: var(--foreground)">
<li>
<strong>Low (0.1-0.5):</strong> More deterministic,
picks most likely words.
</li>
<li>
<strong>Medium (0.6-1.0):</strong> Balanced between
predictability and creativity.
</li>
<li>
<strong>High (1.1-2.0):</strong> More random and
creative, may produce unexpected results.
</li>
</ul>
</div>
<div class="section">
<h2>Error Handling</h2>
<p>The API returns standard HTTP status codes:</p>
<ul style="line-height: 1.8; color: var(--foreground)">
<li><strong>200 OK:</strong> Request successful</li>
<li>
<strong>422 Unprocessable Entity:</strong> Invalid
request parameters
</li>
<li>
<strong>500 Internal Server Error:</strong> Server error
</li>
</ul>
</div>
</div>
</body>
</html>

View File

@@ -4,7 +4,7 @@
<meta charset="UTF-8" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<title>Kreatyw</title>
<link rel="stylesheet" href="/ui/style.css" />
<link rel="stylesheet" href="/ui/style.css?v=2" />
<link rel="icon" type="image/x-icon" href="/ui/favicon.ico" />
@@ -144,31 +144,12 @@
id="editor"
spellcheck="false"
autofocus
placeholder="Start writing something poetic..."
placeholder="Start typing here..."
></textarea>
<button
id="accept-suggestion-btn"
class="btn-floating"
title="Accept Suggestion"
>
<svg
xmlns="http://www.w3.org/2000/svg"
width="24"
height="24"
viewBox="0 0 24 24"
fill="none"
stroke="currentColor"
stroke-width="2"
stroke-linecap="round"
stroke-linejoin="round"
>
<polyline points="20 6 9 17 4 12"></polyline>
</svg>
</button>
</div>
</div>
</main>
</div>
<script src="/ui/script.js"></script>
<script src="/ui/script.js?v=2"></script>
</body>
</html>

View File

@@ -1,171 +1,184 @@
document.addEventListener("DOMContentLoaded", () => {
const editor = document.getElementById("editor");
const suggestionOverlay = document.getElementById("suggestion-overlay");
const status = document.getElementById("status");
const statusIndicator = document.querySelector(".status-indicator");
document.addEventListener('DOMContentLoaded', () => {
const editor = document.getElementById('editor');
const suggestionOverlay = document.getElementById('suggestion-overlay');
const status = document.getElementById('status');
const statusIndicator = document.querySelector('.status-indicator');
// Controls
const nGramSelect = document.getElementById("n-gram");
const nValDisplay = document.getElementById("n-val");
const tempInput = document.getElementById("temperature");
const tempValDisplay = document.getElementById("temp-val");
const lengthInput = document.getElementById("length");
const lengthValDisplay = document.getElementById("length-val");
const generateBtn = document.getElementById("generate-more-btn");
const sidebarToggle = document.getElementById("sidebar-toggle");
const sidebar = document.getElementById("sidebar");
// Controls
const nGramSelect = document.getElementById('n-gram');
const nValDisplay = document.getElementById('n-val');
const tempInput = document.getElementById('temperature');
const tempValDisplay = document.getElementById('temp-val');
const lengthInput = document.getElementById('length');
const lengthValDisplay = document.getElementById('length-val');
const generateBtn = document.getElementById('generate-more-btn');
const sidebarToggle = document.getElementById('sidebar-toggle');
const sidebar = document.getElementById('sidebar');
const acceptSuggestionBtn = document.getElementById('accept-suggestion-btn');
let currentSuggestion = "";
let isFetching = false;
let debounceTimer;
let currentSuggestion = '';
let isFetching = false;
let debounceTimer;
// --- UI Logic ---
// --- UI Logic ---
const updateUI = () => {
nValDisplay.textContent = nGramSelect.value;
tempValDisplay.textContent = tempInput.value;
lengthValDisplay.textContent = lengthInput.value;
};
const updateUI = () => {
nValDisplay.textContent = nGramSelect.value;
tempValDisplay.textContent = tempInput.value;
lengthValDisplay.textContent = lengthInput.value;
};
sidebarToggle.addEventListener("click", () => {
sidebar.classList.toggle("open");
});
sidebarToggle.addEventListener('click', () => {
sidebar.classList.toggle('open');
});
const closeSidebarOnMobile = () => {
if (window.innerWidth <= 768) {
sidebar.classList.remove("open");
}
};
const closeSidebarOnMobile = () => {
if (window.innerWidth <= 768) {
sidebar.classList.remove('open');
}
};
tempInput.addEventListener('input', updateUI);
lengthInput.addEventListener('input', updateUI);
nGramSelect.addEventListener('change', () => {
updateUI();
triggerUpdate();
});
const triggerUpdate = () => {
currentSuggestion = '';
updateSuggestion();
const prompt = editor.value;
if (prompt.trim().length > 0) fetchPrediction(prompt);
};
tempInput.addEventListener('change', () => {
triggerUpdate();
// Optional: close sidebar on change if on mobile
// closeSidebarOnMobile();
});
lengthInput.addEventListener('change', () => {
triggerUpdate();
});
// --- Core Functions ---
const fetchPrediction = async (prompt, customLength = null) => {
if (isFetching) return;
isFetching = true;
status.textContent = 'Thinking...';
statusIndicator.classList.add('fetching');
const n = parseInt(nGramSelect.value);
const temperature = parseFloat(tempInput.value);
const length = customLength || parseInt(lengthInput.value);
try {
const response = await fetch('/api/predict', {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({ prompt, n, temperature, length }),
});
if (!response.ok) throw new Error('Network response failed');
const data = await response.json();
if (customLength) {
insertText(data.prediction || '');
} else {
currentSuggestion = data.prediction || '';
updateSuggestion();
}
} catch (error) {
console.error('Prediction failed:', error);
status.textContent = 'Error';
} finally {
isFetching = false;
status.textContent = 'Idle';
statusIndicator.classList.remove('fetching');
}
};
const updateSuggestion = () => {
const editorText = editor.value;
const space = (editorText.length > 0 && !/\s$/.test(editorText)) ? ' ' : '';
suggestionOverlay.textContent = editorText + space + currentSuggestion;
// Show/hide accept button
if (currentSuggestion) {
acceptSuggestionBtn.classList.add('visible');
} else {
acceptSuggestionBtn.classList.remove('visible');
}
};
const insertText = (text) => {
if (!text) return;
const space = (editor.value.length > 0 && !/\s$/.test(editor.value)) ? ' ' : '';
editor.value += space + text;
currentSuggestion = '';
updateSuggestion();
// Ensure the editor scrolls with content
editor.scrollTop = editor.scrollHeight;
};
// --- Event Handlers ---
editor.addEventListener('input', () => {
clearTimeout(debounceTimer);
currentSuggestion = '';
updateSuggestion();
const prompt = editor.value;
if (prompt.trim().length === 0) return;
debounceTimer = setTimeout(() => fetchPrediction(prompt), 300);
});
editor.addEventListener('keydown', (e) => {
if (e.key === 'Tab' && currentSuggestion) {
e.preventDefault();
insertText(currentSuggestion);
fetchPrediction(editor.value);
}
});
acceptSuggestionBtn.addEventListener('click', () => {
if (currentSuggestion) {
insertText(currentSuggestion);
fetchPrediction(editor.value);
editor.focus();
}
});
generateBtn.addEventListener('click', () => {
fetchPrediction(editor.value, 50);
closeSidebarOnMobile();
});
// Sync scroll
editor.addEventListener('scroll', () => {
suggestionOverlay.scrollTop = editor.scrollTop;
});
// Initialize UI badges
tempInput.addEventListener("input", updateUI);
lengthInput.addEventListener("input", updateUI);
nGramSelect.addEventListener("change", () => {
updateUI();
triggerUpdate();
});
const triggerUpdate = () => {
currentSuggestion = "";
updateSuggestion();
const prompt = editor.value;
if (prompt.trim().length > 0) fetchPrediction(prompt);
};
tempInput.addEventListener("change", () => {
triggerUpdate();
});
lengthInput.addEventListener("change", () => {
triggerUpdate();
});
// --- Core Functions ---
const fetchPrediction = async (prompt, customLength = null) => {
if (isFetching) return;
isFetching = true;
status.textContent = "Thinking...";
if (statusIndicator) statusIndicator.classList.add("fetching");
const n = parseInt(nGramSelect.value);
const temperature = parseFloat(tempInput.value);
const length = customLength || parseInt(lengthInput.value);
try {
const response = await fetch("/api/predict", {
method: "POST",
headers: { "Content-Type": "application/json" },
body: JSON.stringify({ prompt, n, temperature, length }),
});
if (!response.ok) throw new Error("Network response failed");
const data = await response.json();
if (customLength) {
insertText(data.prediction || "");
} else {
currentSuggestion = data.prediction || "";
updateSuggestion();
}
} catch (error) {
console.error("Prediction failed:", error);
status.textContent = "Error: " + error.message;
} finally {
isFetching = false;
status.textContent = "Idle";
if (statusIndicator) statusIndicator.classList.remove("fetching");
}
};
const updateSuggestion = () => {
suggestionOverlay.innerHTML = "";
const editorText = editor.value;
const space = editorText.length > 0 && !/\s$/.test(editorText) ? " " : "";
// Create invisible text span to match editor content exactly
const textSpan = document.createElement("span");
textSpan.textContent = editorText + space;
textSpan.style.color = "transparent";
suggestionOverlay.appendChild(textSpan);
if (currentSuggestion) {
const suggestionSpan = document.createElement("span");
suggestionSpan.textContent = currentSuggestion;
suggestionSpan.className = "suggestion-highlight";
// Add Tab Icon (Better SVG for Tab Key)
const iconSpan = document.createElement("span");
iconSpan.style.pointerEvents = "none"; // Ensure clicks pass through to suggestionSpan
iconSpan.innerHTML = `<svg xmlns="http://www.w3.org/2000/svg" width="16" height="16" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="tab-icon"><path d="M21 9V15"/><path d="M3 12H17"/><path d="m14 9 3 3-3 3"/></svg>`;
suggestionSpan.appendChild(iconSpan);
suggestionOverlay.appendChild(suggestionSpan);
}
};
const insertText = (text) => {
if (!text) return;
const space =
editor.value.length > 0 && !/\s$/.test(editor.value) ? " " : "";
editor.value += space + text;
currentSuggestion = "";
updateSuggestion();
// Ensure the editor scrolls with content
editor.scrollTop = editor.scrollHeight;
};
// --- Event Handlers ---
editor.addEventListener("input", () => {
clearTimeout(debounceTimer);
currentSuggestion = "";
updateSuggestion();
const prompt = editor.value;
if (prompt.trim().length === 0) return;
debounceTimer = setTimeout(() => fetchPrediction(prompt), 300);
});
editor.addEventListener("keydown", (e) => {
if (e.key === "Tab" && currentSuggestion) {
e.preventDefault();
insertText(currentSuggestion);
fetchPrediction(editor.value);
}
});
// Handle click on suggestion
suggestionOverlay.addEventListener("click", (e) => {
if (e.target.classList.contains("suggestion-highlight")) {
insertText(currentSuggestion);
fetchPrediction(editor.value);
editor.focus();
}
});
if (generateBtn) {
generateBtn.addEventListener("click", () => {
fetchPrediction(editor.value, 50);
closeSidebarOnMobile();
});
}
// Sync scroll
editor.addEventListener("scroll", () => {
suggestionOverlay.scrollTop = editor.scrollTop;
});
// Initialize UI badges
updateUI();
});

View File

@@ -117,44 +117,6 @@ body {
}
}
/* Floating Action Button for Suggestions */
.btn-floating {
position: absolute;
bottom: 1.5rem;
right: 1.5rem;
width: 3.5rem;
height: 3.5rem;
border-radius: 50%;
background-color: var(--primary);
color: var(--primary-foreground);
border: none;
box-shadow: 0 4px 12px rgba(0, 0, 0, 0.3);
cursor: pointer;
display: none; /* Hidden by default, shown when suggestion exists */
align-items: center;
justify-content: center;
z-index: 10;
transition:
transform 0.2s,
opacity 0.2s;
}
.btn-floating:active {
transform: scale(0.95);
}
@media (max-width: 768px) {
.btn-floating.visible {
display: flex;
}
}
@media (min-width: 769px) {
.btn-floating.visible {
display: none;
}
}
.sidebar-header {
margin-bottom: 2.5rem;
}
@@ -369,14 +331,14 @@ label {
border: 1px solid var(--border);
border-radius: var(--radius);
background-color: var(--card);
overflow-y: auto;
overflow: hidden;
}
#editor,
#suggestion-overlay {
width: 100%;
height: 100%;
padding: 2rem;
padding: 2rem 2rem 10rem 2rem;
font-family: "SF Mono", "Fira Code", monospace;
font-size: 1.1rem;
line-height: 1.8;
@@ -391,7 +353,7 @@ label {
@media (max-width: 768px) {
#editor,
#suggestion-overlay {
padding: 1rem;
padding: 1rem 1rem 10rem 1rem;
font-size: 1rem;
}
}
@@ -402,14 +364,59 @@ label {
color: var(--foreground);
outline: none;
resize: none;
overflow-y: scroll;
}
#editor::-webkit-scrollbar {
width: 16px;
}
#editor::-webkit-scrollbar-thumb {
background-color: var(--muted);
border: 4px solid var(--card);
border-radius: 8px;
}
#editor::-webkit-scrollbar-track {
background-color: transparent;
}
#suggestion-overlay {
position: absolute;
top: 0;
left: 0;
z-index: 1;
color: var(--muted-foreground);
z-index: 3;
color: transparent;
pointer-events: none;
opacity: 0.5;
overflow: hidden;
overflow-y: scroll;
}
#suggestion-overlay::-webkit-scrollbar {
width: 16px; /* Width must match standard scrollbar width to align text */
background: transparent;
}
#suggestion-overlay::-webkit-scrollbar-thumb {
background: transparent;
}
.suggestion-highlight {
color: var(--muted-foreground);
cursor: pointer;
pointer-events: auto;
}
.tab-icon {
display: inline-block;
width: 1.8em;
height: 1.2em;
vertical-align: middle;
margin-left: 0.5em;
opacity: 0.8;
border: 1px solid var(--muted-foreground);
border-radius: 4px;
padding: 2px;
pointer-events: none;
}