Compare commits


17 Commits

SHA1 Message Date
42864c5491 Should fully work now 2026-01-09 00:47:49 +01:00
3131fd3531 Hopefully? 2026-01-09 00:45:01 +01:00
d936ac872a Maybe now? 2026-01-09 00:40:25 +01:00
019f85a29a HTTPS fix 2026-01-09 00:35:21 +01:00
87c56ba2f8 Better suggestion accept on mobile and word wrap offset fix 2026-01-09 00:24:38 +01:00
5bd7ccf76a Update index.html 2026-01-07 11:17:49 +01:00
630d8ebdc0 Update api.html 2026-01-07 11:02:22 +01:00
6f0977be50 Mobile UI fix 2026-01-07 10:57:57 +01:00
86c20628c8 API page scrolling fix 2026-01-07 10:42:17 +01:00
56b4e056c3 API page 2026-01-07 10:40:22 +01:00
b5d5195f8f Layout fix 2026-01-07 10:39:32 +01:00
e625447222 Update README.md 2026-01-07 09:19:49 +00:00
a80fa055af Add README.md 2026-01-06 21:12:11 +00:00
cc484dbaed Update Dockerfile 2026-01-06 21:02:08 +00:00
2f6c6a0151 desktop styles fix 2026-01-06 21:52:27 +01:00
f1146ecdcb Icons and descriptions 2026-01-06 21:43:06 +01:00
9d5118f973 Mobile UI 2026-01-06 21:32:01 +01:00
10 changed files with 829 additions and 157 deletions

Dockerfile

@@ -1,7 +1,8 @@
 FROM python:3.11-slim
 WORKDIR /app
+RUN rm -rf /app/*
 COPY requirements.txt .
 RUN pip install --no-cache-dir -r requirements.txt
 COPY . .
 EXPOSE 8000
-CMD ["uvicorn", "api:app", "--host", "0.0.0.0", "--port", "8000"]
+CMD ["uvicorn", "api:app", "--host", "0.0.0.0", "--port", "8000", "--proxy-headers", "--forwarded-allow-ips", "*"]

0
README.md Normal file

58
api.py

@@ -1,13 +1,15 @@
 import os
-import uvicorn
-from fastapi import FastAPI, Body
-from fastapi.staticfiles import StaticFiles
-from fastapi.responses import FileResponse
-from pydantic import BaseModel
 import sys
+
+import uvicorn
+from fastapi import Body, FastAPI
+from fastapi.middleware.cors import CORSMiddleware
+from fastapi.responses import FileResponse
+from fastapi.staticfiles import StaticFiles
+from pydantic import BaseModel
 
 # Import core LLM logic
-from llm import load_or_train_model, generate_text, SOURCES_DIR
+from llm import SOURCES_DIR, generate_text, load_or_train_model
 
 # --- Configuration ---
 # Models to pre-load on startup
@@ -18,19 +20,31 @@ UI_DIR = "ui"
 # Cache for loaded models: {n: model}
 MODEL_CACHE = {}
 
 # --- Pydantic Models ---
 class PredictRequest(BaseModel):
     prompt: str
-    temperature: float = 0.7
-    n: int = 3
+    temperature: float = 1.6
+    n: int = 4
     length: int = 5
 
 class PredictResponse(BaseModel):
     prediction: str
 
 # --- FastAPI App ---
 app = FastAPI()
+
+app.add_middleware(
+    CORSMiddleware,
+    allow_origins=["*"],
+    allow_credentials=True,
+    allow_methods=["*"],
+    allow_headers=["*"],
+)
+
 def get_model_for_n(n: int):
     """
     Retrieves the model for a specific N from cache, or loads/trains it.
@@ -38,12 +52,13 @@ def get_model_for_n(n: int):
     global MODEL_CACHE
     if n in MODEL_CACHE:
         return MODEL_CACHE[n]
 
     print(f"Loading/Training model for N={n}...")
     model = load_or_train_model(SOURCES_DIR, n)
     MODEL_CACHE[n] = model
     return model
 
+
 @app.on_event("startup")
 def startup_event():
     """
@@ -54,6 +69,7 @@ def startup_event():
         get_model_for_n(n)
     print(f"Models for N={PRELOAD_N_GRAMS} loaded. Server is ready.")
 
+
 @app.post("/api/predict", response_model=PredictResponse)
 async def predict(request: PredictRequest):
     """
@@ -61,7 +77,7 @@ async def predict(request: PredictRequest):
     """
     n = max(2, min(request.n, 5))
     model = get_model_for_n(n)
 
     if not model:
         return {"prediction": ""}
 
@@ -70,23 +86,35 @@ async def predict(request: PredictRequest):
     prediction = generate_text(
         model,
         start_prompt=request.prompt,
         length=length,
-        temperature=request.temperature
+        temperature=request.temperature,
     )
 
     return PredictResponse(prediction=prediction)
 
+
+@app.get("/api")
+async def api_docs():
+    """
+    API documentation page.
+    """
+    return FileResponse(os.path.join(UI_DIR, "api.html"))
+
+
 # --- Static Files and Root ---
 app.mount("/ui", StaticFiles(directory=UI_DIR), name="ui")
 
+
 @app.get("/")
 async def read_root():
     return FileResponse(os.path.join(UI_DIR, "index.html"))
 
+
 def run():
     # Read port from environment variable, default to 8000
     port = int(os.environ.get("PORT", 8000))
-    uvicorn.run(app, host="0.0.0.0", port=port)
+    uvicorn.run(app, host="0.0.0.0", port=port, proxy_headers=True, forwarded_allow_ips="*")
 
+
 if __name__ == "__main__":
     run()
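
A minimal smoke test for the changes above (the CORS middleware, the clamped n parameter, and the new /api documentation route), sketched with FastAPI's TestClient. It assumes the module is importable as api, that the httpx test dependency is installed, and that the source texts under SOURCES_DIR are available, since the startup event pre-loads the models:

    # Sketch of a smoke test for api.py; run with pytest.
    from fastapi.testclient import TestClient

    from api import app


    def test_predict_and_docs():
        # Using the client as a context manager runs the startup event,
        # which pre-loads the N-gram models.
        with TestClient(app) as client:
            resp = client.post(
                "/api/predict",
                json={"prompt": "Kiedyś tak było", "n": 4, "length": 5},
            )
            assert resp.status_code == 200
            assert "prediction" in resp.json()

            # The new /api route serves the documentation page.
            assert client.get("/api").status_code == 200

            # The CORS middleware should answer preflight requests from any origin.
            preflight = client.options(
                "/api/predict",
                headers={
                    "Origin": "https://example.com",
                    "Access-Control-Request-Method": "POST",
                },
            )
            assert "access-control-allow-origin" in preflight.headers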

1
icon.svg Normal file

@@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="#fafafa"><path d="m12 3-1.912 5.813a2 2 0 0 1-1.275 1.275L3 12l5.813 1.912a2 2 0 0 1 1.275 1.275L12 21l1.912-5.813a2 2 0 0 1 1.275-1.275L21 12l-5.813-1.912a2 2 0 0 1-1.275-1.275L12 3Z"/></svg>


390
ui/api.html Normal file

@@ -0,0 +1,390 @@
<!doctype html>
<html lang="en" class="dark">
<head>
<meta charset="UTF-8" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<title>Kreatyw - API Documentation</title>
<link rel="stylesheet" href="/ui/style.css?v=2" />
<link rel="icon" type="image/x-icon" href="/ui/favicon.ico" />
<style>
body {
overflow: auto !important;
height: auto !important;
}
.doc-container {
max-width: 900px;
margin: 0 auto;
padding: 3rem 2rem;
}
.doc-header {
margin-bottom: 3rem;
border-bottom: 1px solid var(--border);
padding-bottom: 2rem;
}
.doc-header h1 {
font-size: 2.5rem;
margin: 0 0 0.5rem 0;
font-weight: 700;
}
.doc-header p {
color: var(--muted-foreground);
font-size: 1.125rem;
margin: 0;
}
.section {
margin-bottom: 3rem;
}
.section h2 {
font-size: 1.75rem;
margin: 0 0 1rem 0;
font-weight: 600;
}
.section h3 {
font-size: 1.25rem;
margin: 2rem 0 1rem 0;
font-weight: 600;
color: var(--muted-foreground);
}
.section p {
line-height: 1.7;
color: var(--foreground);
margin: 0 0 1rem 0;
}
.code-block {
background-color: var(--secondary);
border: 1px solid var(--border);
border-radius: var(--radius);
padding: 1.5rem;
overflow-x: auto;
margin: 1rem 0;
}
.code-block pre {
margin: 0;
font-family: "SF Mono", "Fira Code", monospace;
font-size: 0.875rem;
line-height: 1.6;
color: var(--foreground);
}
.inline-code {
background-color: var(--secondary);
padding: 0.2rem 0.4rem;
border-radius: 4px;
font-family: "SF Mono", "Fira Code", monospace;
font-size: 0.875em;
color: var(--foreground);
}
.param-table {
width: 100%;
border-collapse: collapse;
margin: 1rem 0;
}
.param-table th,
.param-table td {
text-align: left;
padding: 0.75rem;
border-bottom: 1px solid var(--border);
}
.param-table th {
font-weight: 600;
color: var(--muted-foreground);
font-size: 0.875rem;
text-transform: uppercase;
letter-spacing: 0.05em;
}
.param-table td {
color: var(--foreground);
}
.param-table tr:last-child td {
border-bottom: none;
}
.badge {
display: inline-block;
padding: 0.25rem 0.5rem;
border-radius: 4px;
font-size: 0.75rem;
font-weight: 600;
text-transform: uppercase;
}
.badge-post {
background-color: #10b981;
color: white;
}
.badge-required {
background-color: #ef4444;
color: white;
}
.badge-optional {
background-color: var(--secondary);
color: var(--muted-foreground);
}
.back-link {
display: inline-flex;
align-items: center;
gap: 0.5rem;
color: var(--foreground);
text-decoration: none;
margin-bottom: 2rem;
font-weight: 500;
transition: opacity 0.2s;
}
.back-link:hover {
opacity: 0.8;
}
</style>
</head>
<body>
<div class="doc-container">
<a href="/" class="back-link">
<svg
xmlns="http://www.w3.org/2000/svg"
width="20"
height="20"
viewBox="0 0 24 24"
fill="none"
stroke="currentColor"
stroke-width="2"
stroke-linecap="round"
stroke-linejoin="round"
>
<line x1="19" y1="12" x2="5" y2="12"></line>
<polyline points="12 19 5 12 12 5"></polyline>
</svg>
Back to Editor
</a>
<div class="doc-header">
<h1>Kreatyw API</h1>
<p>Text generation API powered by N-gram language models</p>
</div>
<div class="section">
<h2>Overview</h2>
<p>
The Kreatyw API provides a simple REST endpoint for
generating text continuations using N-gram language models.
The API uses Markov chains trained on source texts to
predict and generate coherent text sequences.
</p>
</div>
<div class="section">
<h2>Base URL</h2>
<div class="code-block">
<pre>https://kreatyw.krzak.org</pre>
</div>
</div>
<div class="section">
<h2>Endpoints</h2>
<h3>POST /api/predict</h3>
<p>Generate text continuation based on a given prompt.</p>
<h3>Request Body</h3>
<table class="param-table">
<thead>
<tr>
<th>Parameter</th>
<th>Type</th>
<th>Required</th>
<th>Description</th>
</tr>
</thead>
<tbody>
<tr>
<td><span class="inline-code">prompt</span></td>
<td>string</td>
<td>
<span class="badge badge-required"
>Required</span
>
</td>
<td>The starting text to continue from</td>
</tr>
<tr>
<td><span class="inline-code">n</span></td>
<td>integer</td>
<td>
<span class="badge badge-optional"
>Optional</span
>
</td>
<td>
N-gram size (2-5). Default: 4. Higher values
produce more coherent but less creative text.
</td>
</tr>
<tr>
<td>
<span class="inline-code">temperature</span>
</td>
<td>float</td>
<td>
<span class="badge badge-optional"
>Optional</span
>
</td>
<td>
Sampling temperature (0.1-2.0). Default: 1.6.
Higher values increase randomness.
</td>
</tr>
<tr>
<td><span class="inline-code">length</span></td>
<td>integer</td>
<td>
<span class="badge badge-optional"
>Optional</span
>
</td>
<td>
Number of words to generate (1-500). Default: 5.
</td>
</tr>
</tbody>
</table>
<h3>Response</h3>
<table class="param-table">
<thead>
<tr>
<th>Field</th>
<th>Type</th>
<th>Description</th>
</tr>
</thead>
<tbody>
<tr>
<td><span class="inline-code">prediction</span></td>
<td>string</td>
<td>The generated text continuation</td>
</tr>
</tbody>
</table>
<h3>Example Request</h3>
<div class="code-block">
<pre>
curl -X POST http://localhost:8000/api/predict \
-H "Content-Type: application/json" \
-d '{
"prompt": "Kiedyś tak było",
"n": 4,
"temperature": 1.2,
"length": 20
}'</pre
>
</div>
<h3>Example Response</h3>
<div class="code-block">
<pre>
{
"prediction": "przezroczyste, że prawie ich dostrzec nie mógł. Słysząc bowiem tyle o jej egzystencji. Zaiste z pogardą arcykapłańskich święceń i źle traktujesz sługi boże."
}</pre
>
</div>
<h3>JavaScript Example</h3>
<div class="code-block">
<pre>
const response = await fetch('/api/predict', {
method: 'POST',
headers: {
'Content-Type': 'application/json'
},
body: JSON.stringify({
prompt: 'Kiedyś tak było',
n: 3,
temperature: 0.8,
length: 15
})
});
const data = await response.json();
console.log(data.prediction);</pre
>
</div>
<h3>Python Example</h3>
<div class="code-block">
<pre>
import requests
response = requests.post('http://localhost:8000/api/predict',
json={
'prompt': 'Kiedyś tak było',
'n': 4,
'temperature': 1.0,
'length': 25
}
)
result = response.json()
print(result['prediction'])</pre
>
</div>
</div>
<div class="section">
<h2>Model Parameters</h2>
<h3>N-gram Size (n)</h3>
<p>
Controls the context window size. Higher values use more
context words to predict the next word:
</p>
<ul style="line-height: 1.8; color: var(--foreground)">
<li>
<strong>n=2 (Bigram):</strong> Uses 1 previous word for
context. Very creative but less coherent.
</li>
<li>
<strong>n=3 (Trigram):</strong> Uses 2 previous words.
Balanced creativity and coherence.
</li>
<li>
<strong>n=4 (Tetragram):</strong> Uses 3 previous words.
More coherent, less random.
</li>
<li>
<strong>n=5 (Pentagram):</strong> Uses 4 previous words.
Most coherent, closest to training data.
</li>
</ul>
<h3>Temperature</h3>
<p>Controls the randomness of predictions:</p>
<ul style="line-height: 1.8; color: var(--foreground)">
<li>
<strong>Low (0.1-0.5):</strong> More deterministic,
picks most likely words.
</li>
<li>
<strong>Medium (0.6-1.0):</strong> Balanced between
predictability and creativity.
</li>
<li>
<strong>High (1.1-2.0):</strong> More random and
creative, may produce unexpected results.
</li>
</ul>
</div>
<div class="section">
<h2>Error Handling</h2>
<p>The API returns standard HTTP status codes:</p>
<ul style="line-height: 1.8; color: var(--foreground)">
<li><strong>200 OK:</strong> Request successful</li>
<li>
<strong>422 Unprocessable Entity:</strong> Invalid
request parameters
</li>
<li>
<strong>500 Internal Server Error:</strong> Server error
</li>
</ul>
</div>
</div>
</body>
</html>
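
The "Model Parameters" section of ui/api.html above describes the n-gram size and temperature in prose. The sketch below illustrates the general technique only (the actual implementation lives in llm.py, which is not part of this diff, and the context and counts here are hypothetical): follow-up counts for the current context are reweighted by the temperature and then sampled.

    # Illustrative sketch of temperature-controlled sampling for an N-gram model.
    # Hypothetical counts; the real logic lives in llm.py (not shown in this diff).
    import random
    from collections import Counter


    def sample_next_word(counts: Counter, temperature: float = 1.6) -> str:
        """Pick the next word from raw follow-up counts for one context."""
        words = list(counts.keys())
        # Low temperature sharpens the distribution (more deterministic picks),
        # high temperature flattens it (more random, "creative" picks).
        weights = [count ** (1.0 / temperature) for count in counts.values()]
        return random.choices(words, weights=weights, k=1)[0]


    # Words observed after the context ("tak", "było") in the training sources.
    context_counts = Counter({"i": 5, "że": 3, "kiedyś": 1})
    print(sample_next_word(context_counts, temperature=0.3))  # usually "i"
    print(sample_next_word(context_counts, temperature=2.0))  # more varied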

BIN
ui/favicon.ico Normal file

Binary file not shown.


BIN
ui/icon.png Normal file

Binary file not shown.


ui/index.html

@@ -4,12 +4,58 @@
<meta charset="UTF-8" /> <meta charset="UTF-8" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" /> <meta name="viewport" content="width=device-width, initial-scale=1.0" />
<title>Kreatyw</title> <title>Kreatyw</title>
<link rel="stylesheet" href="/ui/style.css" /> <link rel="stylesheet" href="/ui/style.css?v=2" />
<link rel="icon" type="image/x-icon" href="/ui/favicon.ico" />
<meta property="og:type" content="website" />
<meta property="og:title" content="Kreatyw" />
<meta property="og:description" content="" />
<meta property="og:image" content="/ui/icon.png" />
<meta name="theme-color" content="#fafafa" />
<meta name="twitter:card" content="summary" />
<meta name="twitter:title" content="Kreatyw" />
<meta name="twitter:image" content="/ui/icon.png" />
</head> </head>
<body> <body>
<div class="mobile-header">
<div class="brand-wrapper">
<svg
xmlns="http://www.w3.org/2000/svg"
width="24"
height="24"
viewBox="0 0 24 24"
fill="white"
class="sparkle-icon"
>
<path
d="m12 3-1.912 5.813a2 2 0 0 1-1.275 1.275L3 12l5.813 1.912a2 2 0 0 1 1.275 1.275L12 21l1.912-5.813a2 2 0 0 1 1.275-1.275L21 12l-5.813-1.912a2 2 0 0 1-1.275-1.275L12 3Z"
/>
</svg>
<h1 class="brand">Kreatyw</h1>
</div>
<button id="sidebar-toggle" class="btn-icon">
<svg
xmlns="http://www.w3.org/2000/svg"
width="24"
height="24"
viewBox="0 0 24 24"
fill="none"
stroke="currentColor"
stroke-width="2"
stroke-linecap="round"
stroke-linejoin="round"
>
<line x1="3" y1="12" x2="21" y2="12"></line>
<line x1="3" y1="6" x2="21" y2="6"></line>
<line x1="3" y1="18" x2="21" y2="18"></line>
</svg>
</button>
</div>
<div class="app-layout"> <div class="app-layout">
<aside class="sidebar"> <aside class="sidebar" id="sidebar">
<div class="sidebar-header"> <div class="sidebar-header desktop-only">
<div class="brand-wrapper"> <div class="brand-wrapper">
<svg <svg
xmlns="http://www.w3.org/2000/svg" xmlns="http://www.w3.org/2000/svg"
@@ -98,12 +144,12 @@
id="editor" id="editor"
spellcheck="false" spellcheck="false"
autofocus autofocus
placeholder="Start writing something poetic..." placeholder="Start typing here..."
></textarea> ></textarea>
</div> </div>
</div> </div>
</main> </main>
</div> </div>
<script src="/ui/script.js"></script> <script src="/ui/script.js?v=2"></script>
</body> </body>
</html> </html>

ui/script.js

@@ -1,135 +1,184 @@
document.addEventListener("DOMContentLoaded", () => {
const editor = document.getElementById("editor");
const suggestionOverlay = document.getElementById("suggestion-overlay");
const status = document.getElementById("status");
const statusIndicator = document.querySelector(".status-indicator");
document.addEventListener('DOMContentLoaded', () => { // Controls
const editor = document.getElementById('editor'); const nGramSelect = document.getElementById("n-gram");
const suggestionOverlay = document.getElementById('suggestion-overlay'); const nValDisplay = document.getElementById("n-val");
const status = document.getElementById('status'); const tempInput = document.getElementById("temperature");
const statusIndicator = document.querySelector('.status-indicator'); const tempValDisplay = document.getElementById("temp-val");
const lengthInput = document.getElementById("length");
// Controls const lengthValDisplay = document.getElementById("length-val");
const nGramSelect = document.getElementById('n-gram'); const generateBtn = document.getElementById("generate-more-btn");
const nValDisplay = document.getElementById('n-val'); const sidebarToggle = document.getElementById("sidebar-toggle");
const tempInput = document.getElementById('temperature'); const sidebar = document.getElementById("sidebar");
const tempValDisplay = document.getElementById('temp-val');
const lengthInput = document.getElementById('length');
const lengthValDisplay = document.getElementById('length-val');
const generateBtn = document.getElementById('generate-more-btn');
let currentSuggestion = ''; let currentSuggestion = "";
let isFetching = false; let isFetching = false;
let debounceTimer; let debounceTimer;
// --- UI Logic --- // --- UI Logic ---
const updateUI = () => {
nValDisplay.textContent = nGramSelect.value;
tempValDisplay.textContent = tempInput.value;
lengthValDisplay.textContent = lengthInput.value;
};
tempInput.addEventListener('input', updateUI); const updateUI = () => {
lengthInput.addEventListener('input', updateUI); nValDisplay.textContent = nGramSelect.value;
nGramSelect.addEventListener('change', () => { tempValDisplay.textContent = tempInput.value;
updateUI(); lengthValDisplay.textContent = lengthInput.value;
triggerUpdate(); };
});
const triggerUpdate = () => { sidebarToggle.addEventListener("click", () => {
currentSuggestion = ''; sidebar.classList.toggle("open");
updateSuggestion(); });
const prompt = editor.value;
if (prompt.trim().length > 0) fetchPrediction(prompt);
};
tempInput.addEventListener('change', triggerUpdate); const closeSidebarOnMobile = () => {
lengthInput.addEventListener('change', triggerUpdate); if (window.innerWidth <= 768) {
sidebar.classList.remove("open");
}
};
// --- Core Functions --- tempInput.addEventListener("input", updateUI);
lengthInput.addEventListener("input", updateUI);
const fetchPrediction = async (prompt, customLength = null) => { nGramSelect.addEventListener("change", () => {
if (isFetching) return;
isFetching = true;
status.textContent = 'Thinking...';
statusIndicator.classList.add('fetching');
const n = parseInt(nGramSelect.value);
const temperature = parseFloat(tempInput.value);
const length = customLength || parseInt(lengthInput.value);
try {
const response = await fetch('/api/predict', {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({ prompt, n, temperature, length }),
});
if (!response.ok) throw new Error('Network response failed');
const data = await response.json();
if (customLength) {
insertText(data.prediction || '');
} else {
currentSuggestion = data.prediction || '';
updateSuggestion();
}
} catch (error) {
console.error('Prediction failed:', error);
status.textContent = 'Error';
} finally {
isFetching = false;
status.textContent = 'Idle';
statusIndicator.classList.remove('fetching');
}
};
const updateSuggestion = () => {
const editorText = editor.value;
const space = (editorText.length > 0 && !/\s$/.test(editorText)) ? ' ' : '';
suggestionOverlay.textContent = editorText + space + currentSuggestion;
};
const insertText = (text) => {
if (!text) return;
const space = (editor.value.length > 0 && !/\s$/.test(editor.value)) ? ' ' : '';
editor.value += space + text;
currentSuggestion = '';
updateSuggestion();
// Ensure the editor scrolls with content
editor.scrollTop = editor.scrollHeight;
};
// --- Event Handlers ---
editor.addEventListener('input', () => {
clearTimeout(debounceTimer);
currentSuggestion = '';
updateSuggestion();
const prompt = editor.value;
if (prompt.trim().length === 0) return;
debounceTimer = setTimeout(() => fetchPrediction(prompt), 300);
});
editor.addEventListener('keydown', (e) => {
if (e.key === 'Tab' && currentSuggestion) {
e.preventDefault();
insertText(currentSuggestion);
fetchPrediction(editor.value);
}
});
generateBtn.addEventListener('click', () => {
fetchPrediction(editor.value, 50);
});
// Sync scroll
editor.addEventListener('scroll', () => {
suggestionOverlay.scrollTop = editor.scrollTop;
});
// Initialize UI badges
updateUI(); updateUI();
triggerUpdate();
});
const triggerUpdate = () => {
currentSuggestion = "";
updateSuggestion();
const prompt = editor.value;
if (prompt.trim().length > 0) fetchPrediction(prompt);
};
tempInput.addEventListener("change", () => {
triggerUpdate();
});
lengthInput.addEventListener("change", () => {
triggerUpdate();
});
// --- Core Functions ---
const fetchPrediction = async (prompt, customLength = null) => {
if (isFetching) return;
isFetching = true;
status.textContent = "Thinking...";
if (statusIndicator) statusIndicator.classList.add("fetching");
const n = parseInt(nGramSelect.value);
const temperature = parseFloat(tempInput.value);
const length = customLength || parseInt(lengthInput.value);
try {
const response = await fetch("/api/predict", {
method: "POST",
headers: { "Content-Type": "application/json" },
body: JSON.stringify({ prompt, n, temperature, length }),
});
if (!response.ok) throw new Error("Network response failed");
const data = await response.json();
if (customLength) {
insertText(data.prediction || "");
} else {
currentSuggestion = data.prediction || "";
updateSuggestion();
}
} catch (error) {
console.error("Prediction failed:", error);
status.textContent = "Error: " + error.message;
} finally {
isFetching = false;
status.textContent = "Idle";
if (statusIndicator) statusIndicator.classList.remove("fetching");
}
};
const updateSuggestion = () => {
suggestionOverlay.innerHTML = "";
const editorText = editor.value;
const space = editorText.length > 0 && !/\s$/.test(editorText) ? " " : "";
// Create invisible text span to match editor content exactly
const textSpan = document.createElement("span");
textSpan.textContent = editorText + space;
textSpan.style.color = "transparent";
suggestionOverlay.appendChild(textSpan);
if (currentSuggestion) {
const suggestionSpan = document.createElement("span");
suggestionSpan.textContent = currentSuggestion;
suggestionSpan.className = "suggestion-highlight";
// Add Tab Icon (Better SVG for Tab Key)
const iconSpan = document.createElement("span");
iconSpan.style.pointerEvents = "none"; // Ensure clicks pass through to suggestionSpan
iconSpan.innerHTML = `<svg xmlns="http://www.w3.org/2000/svg" width="16" height="16" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="tab-icon"><path d="M21 9V15"/><path d="M3 12H17"/><path d="m14 9 3 3-3 3"/></svg>`;
suggestionSpan.appendChild(iconSpan);
suggestionOverlay.appendChild(suggestionSpan);
}
};
const insertText = (text) => {
if (!text) return;
const space =
editor.value.length > 0 && !/\s$/.test(editor.value) ? " " : "";
editor.value += space + text;
currentSuggestion = "";
updateSuggestion();
// Ensure the editor scrolls with content
editor.scrollTop = editor.scrollHeight;
};
// --- Event Handlers ---
editor.addEventListener("input", () => {
clearTimeout(debounceTimer);
currentSuggestion = "";
updateSuggestion();
const prompt = editor.value;
if (prompt.trim().length === 0) return;
debounceTimer = setTimeout(() => fetchPrediction(prompt), 300);
});
editor.addEventListener("keydown", (e) => {
if (e.key === "Tab" && currentSuggestion) {
e.preventDefault();
insertText(currentSuggestion);
fetchPrediction(editor.value);
}
});
// Handle click on suggestion
suggestionOverlay.addEventListener("click", (e) => {
if (e.target.classList.contains("suggestion-highlight")) {
insertText(currentSuggestion);
fetchPrediction(editor.value);
editor.focus();
}
});
if (generateBtn) {
generateBtn.addEventListener("click", () => {
fetchPrediction(editor.value, 50);
closeSidebarOnMobile();
});
}
// Sync scroll
editor.addEventListener("scroll", () => {
suggestionOverlay.scrollTop = editor.scrollTop;
});
// Initialize UI badges
updateUI();
}); });

ui/style.css

@@ -24,18 +24,71 @@
 body {
     background-color: var(--background);
     color: var(--foreground);
-    font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, "Helvetica Neue", Arial, sans-serif;
+    font-family:
+        -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, "Helvetica Neue",
+        Arial, sans-serif;
     margin: 0;
     height: 100vh;
     overflow: hidden;
 }
 
+@media (max-width: 768px) {
+    body {
+        height: auto;
+        overflow: auto;
+    }
+}
+
+/* Mobile Header */
+.mobile-header {
+    display: none;
+    background-color: var(--card);
+    border-bottom: 1px solid var(--border);
+    padding: 0.75rem 1rem;
+    justify-content: space-between;
+    align-items: center;
+    position: sticky;
+    top: 0;
+    z-index: 100;
+}
+
+.btn-icon {
+    background: transparent;
+    border: none;
+    color: var(--foreground);
+    cursor: pointer;
+    padding: 0.5rem;
+    display: flex;
+    align-items: center;
+    justify-content: center;
+}
+
+.desktop-only {
+    display: block;
+}
+
+@media (max-width: 768px) {
+    .mobile-header {
+        display: flex;
+    }
+    .desktop-only {
+        display: none;
+    }
+}
+
 .app-layout {
     display: flex;
     height: 100vh;
     width: 100vw;
 }
 
+@media (max-width: 768px) {
+    .app-layout {
+        flex-direction: column;
+        height: calc(100vh - 57px); /* Subtract header height */
+    }
+}
+
 /* Sidebar */
 .sidebar {
     width: 300px;
@@ -45,12 +98,35 @@ body {
     flex-direction: column;
     padding: 1.5rem;
     flex-shrink: 0;
+    transition: transform 0.3s ease-in-out;
 }
 
+@media (max-width: 768px) {
+    .sidebar {
+        position: fixed;
+        top: 57px;
+        left: 0;
+        width: 100%;
+        height: calc(100vh - 57px);
+        z-index: 90;
+        transform: translateX(-100%);
+        border-right: none;
+    }
+
+    .sidebar.open {
+        transform: translateX(0);
+    }
+}
+
 .sidebar-header {
     margin-bottom: 2.5rem;
 }
 
+@media (max-width: 768px) {
+    .sidebar-header {
+        margin-bottom: 1rem;
+    }
+}
+
 .brand-wrapper {
     display: flex;
     align-items: center;
@@ -77,6 +153,13 @@ body {
     flex-grow: 1;
 }
 
+@media (max-width: 768px) {
+    .settings {
+        gap: 1rem;
+        margin-bottom: 1rem;
+    }
+}
+
 .setting-item {
     display: flex;
     flex-direction: column;
@@ -180,6 +263,12 @@ label {
     border-top: 1px solid var(--border);
 }
 
+@media (max-width: 768px) {
+    .sidebar-footer {
+        display: none; /* Hide footer on mobile to save space */
+    }
+}
+
 .status-indicator {
     display: flex;
     align-items: center;
@@ -202,9 +291,15 @@ label {
 }
 
 @keyframes pulse {
-    0% { opacity: 1; }
-    50% { opacity: 0.4; }
-    100% { opacity: 1; }
+    0% {
+        opacity: 1;
+    }
+    50% {
+        opacity: 0.4;
+    }
+    100% {
+        opacity: 1;
+    }
 }
 
 /* Main Editor Area */
@@ -216,6 +311,14 @@ label {
     background-color: var(--background);
 }
 
+@media (max-width: 768px) {
+    .editor-main {
+        padding: 1rem;
+        height: 100%;
+        width: 100%;
+    }
+}
+
 .editor-container {
     width: 100%;
     max-width: 800px;
@@ -228,13 +331,14 @@ label {
     border: 1px solid var(--border);
     border-radius: var(--radius);
     background-color: var(--card);
-    overflow-y: auto;
+    overflow: hidden;
 }
 
-#editor, #suggestion-overlay {
+#editor,
+#suggestion-overlay {
     width: 100%;
     height: 100%;
-    padding: 2rem;
+    padding: 2rem 2rem 10rem 2rem;
     font-family: "SF Mono", "Fira Code", monospace;
     font-size: 1.1rem;
     line-height: 1.8;
@@ -246,20 +350,73 @@ label {
     box-sizing: border-box;
 }
 
+@media (max-width: 768px) {
+    #editor,
+    #suggestion-overlay {
+        padding: 1rem 1rem 10rem 1rem;
+        font-size: 1rem;
+    }
+}
+
 #editor {
     position: relative;
     z-index: 2;
     color: var(--foreground);
     outline: none;
     resize: none;
+    overflow-y: scroll;
 }
 
+#editor::-webkit-scrollbar {
+    width: 16px;
+}
+
+#editor::-webkit-scrollbar-thumb {
+    background-color: var(--muted);
+    border: 4px solid var(--card);
+    border-radius: 8px;
+}
+
+#editor::-webkit-scrollbar-track {
+    background-color: transparent;
+}
+
 #suggestion-overlay {
     position: absolute;
     top: 0;
     left: 0;
-    z-index: 1;
-    color: var(--muted-foreground);
+    z-index: 3;
+    color: transparent;
     pointer-events: none;
     opacity: 0.5;
+    overflow: hidden;
+    overflow-y: scroll;
 }
+
+#suggestion-overlay::-webkit-scrollbar {
+    width: 16px; /* Width must match standard scrollbar width to align text */
+    background: transparent;
+}
+
+#suggestion-overlay::-webkit-scrollbar-thumb {
+    background: transparent;
+}
+
+.suggestion-highlight {
+    color: var(--muted-foreground);
+    cursor: pointer;
+    pointer-events: auto;
+}
+
+.tab-icon {
+    display: inline-block;
+    width: 1.8em;
+    height: 1.2em;
+    vertical-align: middle;
+    margin-left: 0.5em;
+    opacity: 0.8;
+    border: 1px solid var(--muted-foreground);
+    border-radius: 4px;
+    padding: 2px;
+    pointer-events: none;
+}