Layout fix
@@ -1,3 +0,0 @@
-TODO:
-- Fix the suggestion text not scrolling
-- Add a /api page

api.py (25 lines changed)

@@ -1,13 +1,14 @@
 import os
-import uvicorn
-from fastapi import FastAPI, Body
-from fastapi.staticfiles import StaticFiles
-from fastapi.responses import FileResponse
-from pydantic import BaseModel
 import sys
+
+import uvicorn
+from fastapi import Body, FastAPI
+from fastapi.responses import FileResponse
+from fastapi.staticfiles import StaticFiles
+from pydantic import BaseModel
 
 # Import core LLM logic
-from llm import load_or_train_model, generate_text, SOURCES_DIR
+from llm import SOURCES_DIR, generate_text, load_or_train_model
 
 # --- Configuration ---
 # Models to pre-load on startup
@@ -18,6 +19,7 @@ UI_DIR = "ui"
 # Cache for loaded models: {n: model}
 MODEL_CACHE = {}
 
+
 # --- Pydantic Models ---
 class PredictRequest(BaseModel):
     prompt: str
@@ -25,12 +27,15 @@ class PredictRequest(BaseModel):
     n: int = 3
     length: int = 5
 
+
 class PredictResponse(BaseModel):
     prediction: str
 
+
 # --- FastAPI App ---
 app = FastAPI()
 
+
 def get_model_for_n(n: int):
     """
     Retrieves the model for a specific N from cache, or loads/trains it.
@@ -44,6 +49,7 @@ def get_model_for_n(n: int):
     MODEL_CACHE[n] = model
     return model
 
+
 @app.on_event("startup")
 def startup_event():
     """
@@ -54,6 +60,7 @@ def startup_event():
         get_model_for_n(n)
     print(f"Models for N={PRELOAD_N_GRAMS} loaded. Server is ready.")
 
+
 @app.post("/api/predict", response_model=PredictResponse)
 async def predict(request: PredictRequest):
     """
@@ -71,22 +78,26 @@ async def predict(request: PredictRequest):
         model,
         start_prompt=request.prompt,
         length=length,
-        temperature=request.temperature
+        temperature=request.temperature,
     )
 
     return PredictResponse(prediction=prediction)
 
+
 # --- Static Files and Root ---
 app.mount("/ui", StaticFiles(directory=UI_DIR), name="ui")
 
+
 @app.get("/")
 async def read_root():
     return FileResponse(os.path.join(UI_DIR, "index.html"))
 
+
 def run():
     # Read port from environment variable, default to 8000
     port = int(os.environ.get("PORT", 8000))
     uvicorn.run(app, host="0.0.0.0", port=port)
 
+
 if __name__ == "__main__":
     run()
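
For reference, the predict endpoint above takes the PredictRequest fields (prompt, n, temperature, length) and returns a PredictResponse with a single prediction string. A minimal smoke test, e.g. from the browser console, might look like the sketch below; it assumes the server is listening on localhost:8000 (the default in run(), unless PORT overrides it) and the field values are illustrative:

// Hedged sketch: exercises POST /api/predict as defined in api.py above.
// Assumes localhost:8000; prompt/n/temperature/length values are examples only.
const res = await fetch("http://localhost:8000/api/predict", {
  method: "POST",
  headers: { "Content-Type": "application/json" },
  body: JSON.stringify({
    prompt: "once upon a time", // free-form seed text
    n: 3,                       // n-gram order, as in PredictRequest
    temperature: 1.0,           // sampling temperature
    length: 5,                  // number of tokens to generate
  }),
});
const { prediction } = await res.json(); // PredictResponse body
console.log(prediction);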

ui/script.js (321 lines changed)

@@ -1,171 +1,168 @@
-document.addEventListener('DOMContentLoaded', () => {
-  const editor = document.getElementById('editor');
-  const suggestionOverlay = document.getElementById('suggestion-overlay');
-  const status = document.getElementById('status');
-  const statusIndicator = document.querySelector('.status-indicator');
+document.addEventListener("DOMContentLoaded", () => {
+  const editor = document.getElementById("editor");
+  const suggestionOverlay = document.getElementById("suggestion-overlay");
+  const status = document.getElementById("status");
+  const statusIndicator = document.querySelector(".status-indicator");
 
   // Controls
-  const nGramSelect = document.getElementById('n-gram');
-  const nValDisplay = document.getElementById('n-val');
-  const tempInput = document.getElementById('temperature');
-  const tempValDisplay = document.getElementById('temp-val');
-  const lengthInput = document.getElementById('length');
-  const lengthValDisplay = document.getElementById('length-val');
-  const generateBtn = document.getElementById('generate-more-btn');
-  const sidebarToggle = document.getElementById('sidebar-toggle');
-  const sidebar = document.getElementById('sidebar');
-  const acceptSuggestionBtn = document.getElementById('accept-suggestion-btn');
+  const nGramSelect = document.getElementById("n-gram");
+  const nValDisplay = document.getElementById("n-val");
+  const tempInput = document.getElementById("temperature");
+  const tempValDisplay = document.getElementById("temp-val");
+  const lengthInput = document.getElementById("length");
+  const lengthValDisplay = document.getElementById("length-val");
+  const generateBtn = document.getElementById("generate-more-btn");
+  const sidebarToggle = document.getElementById("sidebar-toggle");
+  const sidebar = document.getElementById("sidebar");
+  const acceptSuggestionBtn = document.getElementById("accept-suggestion-btn");
 
-  let currentSuggestion = '';
+  let currentSuggestion = "";
   let isFetching = false;
   let debounceTimer;
 
   // --- UI Logic ---
   const updateUI = () => {
     nValDisplay.textContent = nGramSelect.value;
     tempValDisplay.textContent = tempInput.value;
     lengthValDisplay.textContent = lengthInput.value;
   };
 
-  sidebarToggle.addEventListener('click', () => {
-    sidebar.classList.toggle('open');
+  sidebarToggle.addEventListener("click", () => {
+    sidebar.classList.toggle("open");
   });
 
   const closeSidebarOnMobile = () => {
     if (window.innerWidth <= 768) {
-      sidebar.classList.remove('open');
+      sidebar.classList.remove("open");
     }
   };
 
-  tempInput.addEventListener('input', updateUI);
-  lengthInput.addEventListener('input', updateUI);
-  nGramSelect.addEventListener('change', () => {
+  tempInput.addEventListener("input", updateUI);
+  lengthInput.addEventListener("input", updateUI);
+  nGramSelect.addEventListener("change", () => {
     updateUI();
     triggerUpdate();
   });
 
   const triggerUpdate = () => {
-    currentSuggestion = '';
+    currentSuggestion = "";
     updateSuggestion();
     const prompt = editor.value;
     if (prompt.trim().length > 0) fetchPrediction(prompt);
   };
 
-  tempInput.addEventListener('change', () => {
+  tempInput.addEventListener("change", () => {
     triggerUpdate();
-    // Optional: close sidebar on change if on mobile
-    // closeSidebarOnMobile();
   });
 
-  lengthInput.addEventListener('change', () => {
+  lengthInput.addEventListener("change", () => {
     triggerUpdate();
   });
 
   // --- Core Functions ---
 
   const fetchPrediction = async (prompt, customLength = null) => {
     if (isFetching) return;
 
     isFetching = true;
-    status.textContent = 'Thinking...';
-    statusIndicator.classList.add('fetching');
+    status.textContent = "Thinking...";
+    statusIndicator.classList.add("fetching");
 
     const n = parseInt(nGramSelect.value);
     const temperature = parseFloat(tempInput.value);
     const length = customLength || parseInt(lengthInput.value);
 
     try {
-      const response = await fetch('/api/predict', {
-        method: 'POST',
-        headers: { 'Content-Type': 'application/json' },
+      const response = await fetch("/api/predict", {
+        method: "POST",
+        headers: { "Content-Type": "application/json" },
         body: JSON.stringify({ prompt, n, temperature, length }),
       });
 
-      if (!response.ok) throw new Error('Network response failed');
+      if (!response.ok) throw new Error("Network response failed");
 
       const data = await response.json();
 
       if (customLength) {
-        insertText(data.prediction || '');
+        insertText(data.prediction || "");
       } else {
-        currentSuggestion = data.prediction || '';
+        currentSuggestion = data.prediction || "";
         updateSuggestion();
       }
-
     } catch (error) {
-      console.error('Prediction failed:', error);
-      status.textContent = 'Error';
+      console.error("Prediction failed:", error);
+      status.textContent = "Error";
     } finally {
       isFetching = false;
-      status.textContent = 'Idle';
-      statusIndicator.classList.remove('fetching');
+      status.textContent = "Idle";
+      statusIndicator.classList.remove("fetching");
     }
   };
 
   const updateSuggestion = () => {
     const editorText = editor.value;
-    const space = (editorText.length > 0 && !/\s$/.test(editorText)) ? ' ' : '';
+    const space = editorText.length > 0 && !/\s$/.test(editorText) ? " " : "";
     suggestionOverlay.textContent = editorText + space + currentSuggestion;
 
     // Show/hide accept button
     if (currentSuggestion) {
-      acceptSuggestionBtn.classList.add('visible');
+      acceptSuggestionBtn.classList.add("visible");
     } else {
-      acceptSuggestionBtn.classList.remove('visible');
+      acceptSuggestionBtn.classList.remove("visible");
     }
   };
 
   const insertText = (text) => {
     if (!text) return;
-    const space = (editor.value.length > 0 && !/\s$/.test(editor.value)) ? ' ' : '';
+    const space =
+      editor.value.length > 0 && !/\s$/.test(editor.value) ? " " : "";
     editor.value += space + text;
-    currentSuggestion = '';
+    currentSuggestion = "";
    updateSuggestion();
 
     // Ensure the editor scrolls with content
     editor.scrollTop = editor.scrollHeight;
   };
 
   // --- Event Handlers ---
 
-  editor.addEventListener('input', () => {
+  editor.addEventListener("input", () => {
     clearTimeout(debounceTimer);
-    currentSuggestion = '';
+    currentSuggestion = "";
     updateSuggestion();
 
     const prompt = editor.value;
     if (prompt.trim().length === 0) return;
     debounceTimer = setTimeout(() => fetchPrediction(prompt), 300);
   });
 
-  editor.addEventListener('keydown', (e) => {
-    if (e.key === 'Tab' && currentSuggestion) {
+  editor.addEventListener("keydown", (e) => {
+    if (e.key === "Tab" && currentSuggestion) {
       e.preventDefault();
       insertText(currentSuggestion);
       fetchPrediction(editor.value);
     }
   });
 
-  acceptSuggestionBtn.addEventListener('click', () => {
+  acceptSuggestionBtn.addEventListener("click", () => {
     if (currentSuggestion) {
       insertText(currentSuggestion);
       fetchPrediction(editor.value);
       editor.focus();
     }
   });
 
-  generateBtn.addEventListener('click', () => {
+  generateBtn.addEventListener("click", () => {
     fetchPrediction(editor.value, 50);
     closeSidebarOnMobile();
   });
 
-  // Sync scroll
-  editor.addEventListener('scroll', () => {
-    suggestionOverlay.scrollTop = editor.scrollTop;
+  // Sync scroll - FIX: Use transform instead of scrollTop
+  editor.addEventListener("scroll", () => {
+    suggestionOverlay.style.transform = `translateY(-${editor.scrollTop}px)`;
   });
 
   // Initialize UI badges
   updateUI();
 });
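
The functional change in this file is the scroll-sync handler at the bottom; the rest is quote-style and wrapping churn from a formatter pass. A likely reason the old scrollTop approach failed (the first TODO item) is that the overlay grows to fit its content, so it has no scrollable overflow of its own and any value assigned to scrollTop is clamped back to 0. A CSS transform moves the overlay's rendered content regardless of overflow, so it can track the textarea's scroll offset exactly. A self-contained sketch of the pattern, reusing the element IDs above:

// Sketch of the transform-based scroll sync adopted above.
// Assumes #editor is a scrollable <textarea> layered over #suggestion-overlay,
// and that the overlay is clipped with overflow: hidden (see the CSS hunks below).
const editorEl = document.getElementById("editor");
const overlayEl = document.getElementById("suggestion-overlay");

editorEl.addEventListener("scroll", () => {
  // overlayEl.scrollTop = editorEl.scrollTop would be a no-op here, because
  // the overlay has no scrollable overflow; translating it works unconditionally.
  overlayEl.style.transform = `translateY(-${editorEl.scrollTop}px)`;
});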

@@ -369,7 +369,7 @@ label {
   border: 1px solid var(--border);
   border-radius: var(--radius);
   background-color: var(--card);
-  overflow-y: auto;
+  overflow: hidden;
 }
 
 #editor,
@@ -402,6 +402,7 @@ label {
   color: var(--foreground);
   outline: none;
   resize: none;
+  overflow-y: auto;
 }
 
 #suggestion-overlay {
@@ -412,4 +413,6 @@ label {
   color: var(--muted-foreground);
   pointer-events: none;
   opacity: 0.5;
+  overflow: hidden;
+  transition: transform 0.05s linear;
 }
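
Read together with the script change, these stylesheet hunks (the stylesheet's filename was not preserved in this capture) move scrolling from the shared wrapper to the textarea itself: the wrapper now clips its children (overflow: hidden), #editor becomes the sole scroll container (overflow-y: auto), and #suggestion-overlay is clipped and shifted by the translateY transform set in script.js, with the short 0.05s transition smoothing the motion between scroll events.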