require (
github.com/google/uuid v1.6.0 // indirect
+ github.com/gorilla/websocket v1.5.3 // indirect
golang.org/x/sys v0.17.0 // indirect
)
github.com/golang-jwt/jwt/v5 v5.2.0/go.mod h1:pqrtFR0X4osieyHYxtmOUWsAWrfe1Q5UVIyoH402zdk=
github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0=
github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
+github.com/gorilla/websocket v1.5.3 h1:saDtZ6Pbx/0u+bgYQ3q96pZgCzfhKXGPqt7kZ72aNNg=
+github.com/gorilla/websocket v1.5.3/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE=
golang.org/x/crypto v0.19.0 h1:ENy+Az/9Y1vSrlrvBSyna3PITt4tiZLf7sgCjZBX7Wo=
golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDfU=
golang.org/x/sys v0.17.0 h1:25cE3gD+tdBA7lp7QfhuV+rJiE9YXTcS3VG1SqssI/Y=
"encoding/json"
"fmt"
"net/http"
+ "strings"
"sync"
"time"
// Convert from utils.MigrationProgress to handlers.MigrationProgress
migrationProgress[req.Username] = MigrationProgress{
Phase: progress.Phase,
- CurrentItem: progress.CurrentItem,
ProcessedItems: progress.ProcessedItems,
TotalItems: progress.TotalItems,
+ ErrorCount: progress.ErrorCount,
}
migrationProgressMutex.Unlock()
}
return
}
- utils.Logger.Printf("Migration completed for user '%s'", req.Username)
-
// Mark migration as completed
activeMigrationsMutex.Lock()
activeMigrations[req.Username] = false
continue
}
- if username_from_file, ok := user["username"].(string); ok && username_from_file == username {
+ if username_from_file, ok := user["username"].(string); ok && strings.EqualFold(username_from_file, username) {
utils.Logger.Printf("Registration failed. Username '%s' already exists", username)
return false, fmt.Errorf("username already exists: %d", http.StatusBadRequest)
}
// MigrationProgress stores the progress of user data migration
type MigrationProgress struct {
Phase string `json:"phase"` // Current migration phase
- CurrentItem string `json:"current_item"` // Current item being migrated
ProcessedItems int `json:"processed_items"` // Number of items processed
TotalItems int `json:"total_items"` // Total number of items to process
+ ErrorCount int `json:"error_count"` // Number of errors encountered during migration
}
// migrationProgress keeps track of migration progress for all users
var activeMigrations = make(map[string]bool)
var activeMigrationsMutex sync.RWMutex
-// CheckMigrationProgress checks the progress of a user migration
-func CheckMigrationProgress(w http.ResponseWriter, r *http.Request) {
- // Get username from query parameters
- username := r.URL.Query().Get("username")
- if username == "" {
- http.Error(w, "Username is required", http.StatusBadRequest)
- return
- }
-
- // Get progress
- migrationProgressMutex.Lock()
- progress, exists := migrationProgress[username]
- migrationProgressMutex.Unlock()
-
- if !exists {
- utils.JSONResponse(w, http.StatusOK, map[string]interface{}{
- "progress": 0,
- "status": "not_started",
- })
- return
- }
-
- // Return progress
- status := "in_progress"
- if progress.TotalItems > 0 && progress.ProcessedItems >= progress.TotalItems {
- status = "completed"
- }
-
- utils.JSONResponse(w, http.StatusOK, map[string]interface{}{
- "progress": progress,
- "status": status,
- })
-}
-
// GetMigrationProgress returns the migration progress for a user
func GetMigrationProgress(w http.ResponseWriter, r *http.Request) {
- // Parse the request body
- var req struct {
- Username string `json:"username"`
- }
- if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
- http.Error(w, "Invalid request body", http.StatusBadRequest)
+ username := r.URL.Query().Get("username")
+ if username == "" {
+ utils.Logger.Printf("username: %s", username)
+ http.Error(w, "Unauthorized", http.StatusUnauthorized)
return
}
// Get migration progress
migrationProgressMutex.Lock()
- progress, exists := migrationProgress[req.Username]
+ progress, exists := migrationProgress[username]
migrationProgressMutex.Unlock()
// Check if migration is actually active
activeMigrationsMutex.RLock()
- isActive := activeMigrations[req.Username]
+ isActive := activeMigrations[username]
activeMigrationsMutex.RUnlock()
if !exists {
- utils.JSONResponse(w, http.StatusOK, map[string]interface{}{
+ utils.JSONResponse(w, http.StatusOK, map[string]any{
"migration_in_progress": false,
- "status": "not_started",
+ "progress": map[string]string{"phase": "not_started"},
})
return
}
// Check if migration is completed
- migrationCompleted := progress.Phase == "completed" || (progress.ProcessedItems >= progress.TotalItems && progress.TotalItems > 0)
+ migrationCompleted := progress.Phase == "completed"
- // Return progress
- status := "in_progress"
- if migrationCompleted {
- status = "completed"
- } else if !isActive {
- // If migration is not active but not completed, it might have failed
- status = "failed"
- }
-
- utils.JSONResponse(w, http.StatusOK, map[string]interface{}{
+ utils.JSONResponse(w, http.StatusOK, map[string]any{
"migration_in_progress": isActive && !migrationCompleted,
"progress": progress,
- "status": status,
})
}
// Register routes
mux.HandleFunc("POST /users/login", handlers.Login)
+ mux.HandleFunc("GET /users/migrationWebSocket", handlers.HandleMigrationWebSocket)
+ mux.HandleFunc("GET /users/migrationProgress", handlers.GetMigrationProgress)
mux.HandleFunc("GET /users/isRegistrationAllowed", handlers.IsRegistrationAllowed)
mux.HandleFunc("POST /users/register", handlers.RegisterHandler)
mux.HandleFunc("GET /users/logout", handlers.Logout)
mux.HandleFunc("GET /users/check", middleware.RequireAuth(handlers.CheckLogin))
mux.HandleFunc("GET /users/getUserSettings", middleware.RequireAuth(handlers.GetUserSettings))
mux.HandleFunc("POST /users/saveUserSettings", middleware.RequireAuth(handlers.SaveUserSettings))
- mux.HandleFunc("POST /users/migrationProgress", handlers.GetMigrationProgress)
mux.HandleFunc("POST /logs/saveLog", middleware.RequireAuth(handlers.SaveLog))
mux.HandleFunc("GET /logs/getLog", middleware.RequireAuth(handlers.GetLog))
package utils
import (
- "crypto/cipher"
"crypto/rand"
- "encoding/base64"
"encoding/json"
"fmt"
"io"
"strconv"
"strings"
"sync"
-
- "golang.org/x/crypto/chacha20poly1305"
)
// Mutexes für Dateizugriffe
return months, nil
}
-
-// CreateAEAD creates an AEAD cipher for encryption/decryption
-func CreateAEAD(key []byte) (cipher.AEAD, error) {
- return chacha20poly1305.New(key)
-}
-
-// EncryptText encrypts text using the provided key
-func EncryptText(text, key string) (string, error) {
- // Decode key
- keyBytes, err := base64.URLEncoding.DecodeString(key)
- if err != nil {
- return "", fmt.Errorf("error decoding key: %v", err)
- }
-
- // Create AEAD cipher
- aead, err := chacha20poly1305.New(keyBytes)
- if err != nil {
- return "", fmt.Errorf("error creating cipher: %v", err)
- }
-
- // Create nonce
- nonce := make([]byte, aead.NonceSize())
- if _, err := io.ReadFull(rand.Reader, nonce); err != nil {
- return "", fmt.Errorf("error creating nonce: %v", err)
- }
-
- // Encrypt text
- ciphertext := aead.Seal(nonce, nonce, []byte(text), nil)
- return base64.URLEncoding.EncodeToString(ciphertext), nil
-}
-
-// DecryptText decrypts text using the provided key
-func DecryptText(ciphertext, key string) (string, error) {
- // Decode key and ciphertext
- keyBytes, err := base64.URLEncoding.DecodeString(key)
- if err != nil {
- return "", fmt.Errorf("error decoding key: %v", err)
- }
-
- ciphertextBytes, err := base64.URLEncoding.DecodeString(ciphertext)
- if err != nil {
- return "", fmt.Errorf("error decoding ciphertext: %v", err)
- }
-
- // Create AEAD cipher
- aead, err := chacha20poly1305.New(keyBytes)
- if err != nil {
- return "", fmt.Errorf("error creating cipher: %v", err)
- }
-
- // Extract nonce from ciphertext
- if len(ciphertextBytes) < aead.NonceSize() {
- return "", fmt.Errorf("ciphertext too short")
- }
- nonce, ciphertextBytes := ciphertextBytes[:aead.NonceSize()], ciphertextBytes[aead.NonceSize():]
-
- // Decrypt text
- plaintext, err := aead.Open(nil, nonce, ciphertextBytes, nil)
- if err != nil {
- return "", fmt.Errorf("error decrypting ciphertext: %v", err)
- }
-
- return string(plaintext), nil
-}
-
-// EncryptFile encrypts a file using the provided key
-func EncryptFile(data []byte, key string) ([]byte, error) {
- // Decode key
- keyBytes, err := base64.URLEncoding.DecodeString(key)
- if err != nil {
- return nil, fmt.Errorf("error decoding key: %v", err)
- }
-
- // Create AEAD cipher
- aead, err := chacha20poly1305.New(keyBytes)
- if err != nil {
- return nil, fmt.Errorf("error creating cipher: %v", err)
- }
-
- // Create nonce
- nonce := make([]byte, aead.NonceSize())
- if _, err := io.ReadFull(rand.Reader, nonce); err != nil {
- return nil, fmt.Errorf("error creating nonce: %v", err)
- }
-
- // Encrypt file
- ciphertext := aead.Seal(nonce, nonce, data, nil)
- return ciphertext, nil
-}
-
-// DecryptFile decrypts a file using the provided key
-func DecryptFile(ciphertext []byte, key string) ([]byte, error) {
- // Decode key
- keyBytes, err := base64.URLEncoding.DecodeString(key)
- if err != nil {
- return nil, fmt.Errorf("error decoding key: %v", err)
- }
-
- // Create AEAD cipher
- aead, err := chacha20poly1305.New(keyBytes)
- if err != nil {
- return nil, fmt.Errorf("error creating cipher: %v", err)
- }
-
- // Extract nonce from ciphertext
- if len(ciphertext) < aead.NonceSize() {
- return nil, fmt.Errorf("ciphertext too short")
- }
- nonce, ciphertext := ciphertext[:aead.NonceSize()], ciphertext[aead.NonceSize():]
-
- // Decrypt file
- plaintext, err := aead.Open(nil, nonce, ciphertext, nil)
- if err != nil {
- return nil, fmt.Errorf("error decrypting ciphertext: %v", err)
- }
-
- return plaintext, nil
-}
-
-// GetEncryptionKey retrieves the encryption key for a specific user
-func GetEncryptionKey(userID int, derivedKey string) (string, error) {
- // Get users
- users, err := GetUsers()
- if err != nil {
- return "", fmt.Errorf("error retrieving users: %v", err)
- }
-
- // Find user
- usersList, ok := users["users"].([]any)
- if !ok {
- return "", fmt.Errorf("users.json is not in the correct format")
- }
-
- for _, u := range usersList {
- user, ok := u.(map[string]any)
- if !ok {
- continue
- }
-
- if id, ok := user["user_id"].(float64); ok && int(id) == userID {
- encEncKey, ok := user["enc_enc_key"].(string)
- if !ok {
- return "", fmt.Errorf("user data is not in the correct format")
- }
-
- // Decode derived key
- derivedKeyBytes, err := base64.StdEncoding.DecodeString(derivedKey)
- if err != nil {
- return "", fmt.Errorf("error decoding derived key: %v", err)
- }
-
- // Create Fernet cipher
- aead, err := CreateAEAD(derivedKeyBytes)
- if err != nil {
- return "", fmt.Errorf("error creating cipher: %v", err)
- }
-
- // Decode encrypted key
- encEncKeyBytes, err := base64.StdEncoding.DecodeString(encEncKey)
- if err != nil {
- return "", fmt.Errorf("error decoding encrypted key: %v", err)
- }
-
- // Extract nonce from encrypted key
- if len(encEncKeyBytes) < aead.NonceSize() {
- return "", fmt.Errorf("encrypted key too short")
- }
- nonce, encKeyBytes := encEncKeyBytes[:aead.NonceSize()], encEncKeyBytes[aead.NonceSize():]
-
- // Decrypt key
- keyBytes, err := aead.Open(nil, nonce, encKeyBytes, nil)
- if err != nil {
- return "", fmt.Errorf("error decrypting key: %v", err)
- }
-
- // Return base64-encoded key
- return base64.URLEncoding.EncodeToString(keyBytes), nil
- }
- }
-
- return "", fmt.Errorf("user not found")
-}
"encoding/base64"
"encoding/json"
"fmt"
- "net/http"
"os"
"path/filepath"
"strconv"
"time"
)
-// Mutexes für Dateizugriffe
+// Mutexes for file access
var (
- activeMigrationsMutex sync.RWMutex // Für die Map der aktiven Migrationen
- oldUsersFileMutex sync.RWMutex // Für old/users.json
- templatesMutex sync.RWMutex // Für templates.json
- tagsMutex sync.RWMutex // Für tags.json
- logsMutex sync.RWMutex // Für Logs
- filesMutex sync.RWMutex // Für Dateien im files-Verzeichnis
+ activeMigrationsMutex sync.RWMutex // For the map of active migrations
+ oldUsersFileMutex sync.RWMutex // For old/users.json
+ templatesMutex sync.RWMutex // For templates.json
+ logsMutex sync.RWMutex // For logs
+ filesMutex sync.RWMutex // For files in the files directory
)
-// Map zur Verfolgung aktiver Migrationen (username -> bool)
+// Map to track active migrations (username -> bool)
var activeMigrations = make(map[string]bool)
-// IsUserMigrating prüft, ob für einen Benutzer bereits eine Migration läuft
+// IsUserMigrating checks if a migration is already in progress for a user
func IsUserMigrating(username string) bool {
activeMigrationsMutex.RLock()
defer activeMigrationsMutex.RUnlock()
return activeMigrations[username]
}
-// SetUserMigrating markiert einen Benutzer als migrierend oder nicht migrierend
+// SetUserMigrating marks a user as migrating or not migrating
func SetUserMigrating(username string, migrating bool) {
activeMigrationsMutex.Lock()
defer activeMigrationsMutex.Unlock()
// MigrateUserData migrates a user's data from the old format to the new format
func MigrateUserData(username, password string, registerFunc RegisterUserFunc, progressChan chan<- MigrationProgress) error {
- // Prüfen, ob bereits eine Migration für diesen Benutzer läuft
+ // Check if a migration is already in progress for this user
if IsUserMigrating(username) {
Logger.Printf("Migration for user %s is already in progress", username)
return fmt.Errorf("migration already in progress for user %s", username)
}
- // Benutzer als migrierend markieren
+ // Mark user as migrating
SetUserMigrating(username, true)
- // Sicherstellen, dass der Benutzer am Ende nicht mehr als migrierend markiert ist
+ // Ensure the user is no longer marked as migrating when this function ends
defer SetUserMigrating(username, false)
+ // Initialize migration progress
+ currentProgress := MigrationProgress{
+ Phase: "creating_new_user",
+ ProcessedItems: 0,
+ ErrorCount: 0,
+ }
+
+ if progressChan != nil {
+ progressChan <- currentProgress // Send initial progress
+ }
+
+ // Error handling function for consistent error handling
+ handleError := func(errMsg string, err error) error {
+ errorMessage := fmt.Sprintf("%s: %v", errMsg, err)
+ Logger.Printf("Migration error for user %s: %s", username, errorMessage)
+
+ // Send final update with Success=false
+ return fmt.Errorf("%s: %v", errMsg, err)
+ }
+
start := time.Now()
- Logger.Printf("Starting migration for user %s with password %s", username, password)
+ Logger.Printf("Starting migration for user %s", username)
// Get old users
oldUsersFileMutex.RLock()
oldUsersFileMutex.RUnlock()
if err != nil {
- return fmt.Errorf("error reading old users: %v", err)
+ return handleError("Error reading old users", err)
}
// Parse old users
var oldUsers map[string]any
if err := json.Unmarshal(oldUsersBytes, &oldUsers); err != nil {
- return fmt.Errorf("error parsing old users: %v", err)
+ return handleError("Error parsing old users", err)
}
// Find the old user by username
}
if oldUser == nil {
- return fmt.Errorf("user %s not found in old data", username)
+ return handleError(fmt.Sprintf("User %s not found in old data", username), nil)
}
oldUserID = int(oldUser["user_id"].(float64))
-
Logger.Printf("Found old user ID: %d", oldUserID)
- // Update progress
- if progressChan != nil {
- progressChan <- MigrationProgress{
- Phase: "creating_new_user",
- CurrentItem: "",
- ProcessedItems: 1,
- TotalItems: 5,
- }
- }
-
// Verify username matches
oldUsername, ok := oldUser["username"].(string)
if !ok || oldUsername != username {
- return fmt.Errorf("username mismatch: expected %s, got %s", username, oldUsername)
+ return handleError(fmt.Sprintf("Username mismatch: expected %s, got %s", username, oldUsername), nil)
}
// Get encryption related data from old user
oldSalt, ok := oldUser["salt"].(string)
if !ok {
- return fmt.Errorf("old user data is missing salt")
+ return handleError("Old user data is missing salt", nil)
}
oldEncEncKey, ok := oldUser["enc_enc_key"].(string)
if !ok {
- return fmt.Errorf("old user data is missing encrypted key")
+ return handleError("Old user data is missing encrypted key", nil)
}
// Derive key from password and salt
oldDerivedKey := DeriveKeyFromOldPassword(password, oldSalt)
_, err = base64.StdEncoding.DecodeString(base64.URLEncoding.EncodeToString(oldDerivedKey))
if err != nil {
- return fmt.Errorf("error decoding old derived key: %v", err)
+ return handleError("Error decoding old derived key", err)
}
// Decode the old encrypted key (just for validation)
_, err = base64.URLEncoding.DecodeString(oldEncEncKey)
if err != nil {
- return fmt.Errorf("error decoding old encrypted key: %v", err)
+ return handleError("Error decoding old encrypted key", err)
}
// Decrypt the old encryption key
oldEncKey, err := FernetDecrypt(oldEncEncKey, oldDerivedKey)
if err != nil {
- return fmt.Errorf("error decrypting old encryption key: %v", err)
+ return handleError("Error decrypting old encryption key", err)
}
- // Debug: Zeige den Schlüssel
- fmt.Printf("Old encryption key: %s\n", oldEncKey)
-
- // Registriere den Benutzer mit der übergebenen Funktion
+ // Register the user with the provided function
success, err := registerFunc(username, password)
if err != nil {
- return fmt.Errorf("error registering new user: %v", err)
+ return handleError("Error registering new user", err)
}
if !success {
- return fmt.Errorf("failed to register new user")
+ return handleError("Failed to register new user", nil)
}
users, err := GetUsers()
if err != nil {
- return fmt.Errorf("error getting users: %v", err)
+ return handleError("Error getting users", err)
}
+
// Find the new user ID
newUserID := 0
newDerivedKey := ""
// Verify password
if !VerifyPassword(password, u["password"].(string), u["salt"].(string)) {
- Logger.Printf("Login failed. Password for user '%s' is incorrect", username)
- return fmt.Errorf("user/Password combination not found: %d", http.StatusNotFound)
+ return handleError(fmt.Sprintf("Login failed. Password for user '%s' is incorrect", username), nil)
}
// Get intermediate key
derivedKey, err := DeriveKeyFromPassword(password, u["salt"].(string))
if err != nil {
- return fmt.Errorf("internal Server Error: %d", http.StatusInternalServerError)
+ return handleError("Internal Server Error", err)
}
newDerivedKey = base64.StdEncoding.EncodeToString(derivedKey)
}
}
if newUserID <= 0 {
- return fmt.Errorf("new user ID not found for username: %s", username)
- }
-
- fmt.Printf("New derived key: %s\n", newDerivedKey)
-
- // Update progress
- if progressChan != nil {
- progressChan <- MigrationProgress{
- Phase: "writing_user_data",
- ProcessedItems: 3,
- TotalItems: 5,
- }
+ return handleError(fmt.Sprintf("New user ID not found for username: %s", username), nil)
}
// Now migrate all the data
// Create new data directory
if err := os.MkdirAll(newDataDir, 0755); err != nil {
- return fmt.Errorf("error creating new data directory: %v", err)
+ return handleError("Error creating new data directory", err)
}
encKey, err := GetEncryptionKey(newUserID, string(newDerivedKey))
if err != nil {
- return fmt.Errorf("error getting encryption key: %v", err)
+ return handleError("Error getting encryption key", err)
}
- fmt.Printf("New encryption key: %s\n", encKey)
+ time.Sleep(800 * time.Millisecond) // Simulate some delay for migration
// Migrate templates
- if err := migrateTemplates(oldDataDir, newDataDir, oldEncKey, encKey, progressChan); err != nil {
- return fmt.Errorf("error migrating templates: %v", err)
+	if err := migrateTemplates(oldDataDir, newDataDir, oldEncKey, encKey, &currentProgress, progressChan); err != nil {
+ return handleError("Error migrating templates", err)
}
+ time.Sleep(800 * time.Millisecond) // Simulate some delay for migration
+
// Migrate logs (years/months)
- if err := migrateLogs(oldDataDir, newDataDir, oldEncKey, encKey, progressChan); err != nil {
- return fmt.Errorf("error migrating logs: %v", err)
+	if err := migrateLogs(oldDataDir, newDataDir, oldEncKey, encKey, &currentProgress, progressChan); err != nil {
+ return handleError("Error migrating logs", err)
}
// Migrate files
- if err := migrateFiles(filepath.Join(Settings.DataPath, "old", "files"), newDataDir, oldEncKey, encKey, progressChan); err != nil {
- return fmt.Errorf("error migrating files: %v", err)
+	if err := migrateFiles(filepath.Join(Settings.DataPath, "old", "files"), newDataDir, oldEncKey, encKey, &currentProgress, progressChan); err != nil {
+ return handleError("Error migrating files", err)
}
// Set final progress
+ currentProgress.Phase = "completed"
+ currentProgress.ProcessedItems = 0
+ currentProgress.TotalItems = 0
+
if progressChan != nil {
- progressChan <- MigrationProgress{
- Phase: "completed",
- ProcessedItems: 5,
- TotalItems: 5,
- }
+ progressChan <- currentProgress // Send final progress update
}
Logger.Printf("Migration completed for user %s (Old ID: %d, New ID: %d) after %v", username, oldUserID, newUserID, time.Since(start))
// MigrationProgress contains information about the migration progress
type MigrationProgress struct {
Phase string `json:"phase"` // Current migration phase
- CurrentItem string `json:"current_item"` // Current item being migrated
ProcessedItems int `json:"processed_items"` // Number of already processed items
TotalItems int `json:"total_items"` // Total number of items to migrate
+ ErrorCount int `json:"error_count"` // Number of errors encountered during migration
}
// RegisterUserFunc is a function type for user registration
// Helper functions for migration
-func migrateTemplates(oldDir, newDir string, oldKey string, newKey string, progressChan chan<- MigrationProgress) error {
+func migrateTemplates(oldDir, newDir string, oldKey string, newKey string, progress *MigrationProgress, progressChan chan<- MigrationProgress) error {
// Check if old templates exist
templatesMutex.RLock()
oldTemplatesPath := filepath.Join(oldDir, "templates.json")
}
// Update progress
+ progress.Phase = "migrating_templates"
+ progress.ProcessedItems = 0
+ progress.TotalItems = 1 // Just one template file to migrate
+
+ // Send initial progress update
if progressChan != nil {
- progressChan <- MigrationProgress{
- Phase: "migrating_templates",
- ProcessedItems: 1,
- TotalItems: 2,
- }
+ progressChan <- *progress
}
// Read old templates
file.Close()
templatesMutex.Unlock()
+ // Update progress and send final update
+ progress.ProcessedItems = 1
+ if progressChan != nil {
+ progressChan <- *progress
+ }
+
return nil
}
-func migrateLogs(oldDir, newDir string, oldKey string, newKey string, progressChan chan<- MigrationProgress) error {
+func migrateLogs(oldDir, newDir string, oldKey string, newKey string, progress *MigrationProgress, progressChan chan<- MigrationProgress) error {
// Count all month files in all year directories
var allMonthFiles []struct {
yearDir string
}
// Update progress with total number of months
+ progress.Phase = "migrating_logs"
+ progress.ProcessedItems = 0
+ progress.TotalItems = totalMonths
+
+ // Send initial progress update
if progressChan != nil {
- progressChan <- MigrationProgress{
- Phase: "migrating_logs",
- ProcessedItems: 0,
- TotalItems: totalMonths,
- }
+ progressChan <- *progress
}
processedMonths := 0
// Process all months
for _, monthInfo := range allMonthFiles {
- // Update progress with total number of months
- if progressChan != nil && processedMonths%5 == 0 {
- progressChan <- MigrationProgress{
- Phase: "migrating_logs",
- ProcessedItems: processedMonths,
- TotalItems: totalMonths,
+ // Update progress with number of months
+ progress.ProcessedItems = processedMonths
+
+ // Send progress update every 5 months or at the end
+ if processedMonths%5 == 0 || processedMonths == totalMonths-1 {
+ if progressChan != nil {
+ progressChan <- *progress
}
}
if err != nil {
Logger.Printf("Error reading old month %s: %v", oldMonthPath, err)
+ progress.ErrorCount++
continue
}
var monthData map[string]any
if err := json.Unmarshal(oldMonthBytes, &monthData); err != nil {
Logger.Printf("Error parsing old month %s: %v", oldMonthPath, err)
+ progress.ErrorCount++
continue
}
days, ok := monthData["days"].([]any)
if !ok {
Logger.Printf("Month %s has unexpected format - missing 'days' array", oldMonthPath)
+ progress.ErrorCount++
continue
}
oldKeyBytes, err := base64.URLEncoding.DecodeString(oldKey)
if err != nil {
Logger.Printf("Error decoding oldKey %v", err)
+ progress.ErrorCount++
continue
}
plaintext, err = FernetDecrypt(encryptedText, oldKeyBytes)
if err != nil {
Logger.Printf("Error decrypting content for day %f in %s: %v", day["day"].(float64), oldMonthPath, err)
+ progress.ErrorCount++
continue
}
}
newEncrypted, err := EncryptText(plaintext, newKey)
if err != nil {
Logger.Printf("Error encrypting content for day %d in %s: %v", i, oldMonthPath, err)
+ progress.ErrorCount++
continue
}
newEncrypted, err := EncryptText(dateWritten, newKey)
if err != nil {
Logger.Printf("Error encrypting date_written for day %d in %s: %v", i, oldMonthPath, err)
+ progress.ErrorCount++
continue
}
day["date_written"] = newEncrypted
plaintext, err = FernetDecrypt(encryptedText, oldKeyBytes)
if err != nil {
Logger.Printf("Error decrypting history item %f for day %d in %s: %v", historyItem["version"].(float64), day["day"].(int), oldMonthPath, err)
+ progress.ErrorCount++
continue
}
}
newEncrypted, err := EncryptText(plaintext, newKey)
if err != nil {
Logger.Printf("Error encrypting history item %d for day %d in %s: %v", j, i, oldMonthPath, err)
+ progress.ErrorCount++
continue
}
newEncrypted, err := EncryptText(dateWritten, newKey)
if err != nil {
Logger.Printf("Error encrypting date_written for history item %d in day %d of %s: %v", j, i, oldMonthPath, err)
+ progress.ErrorCount++
continue
}
historyItem["date_written"] = newEncrypted
if err != nil {
logsMutex.Unlock()
Logger.Printf("Error creating directory for %s: %v", newMonthPath, err)
+ progress.ErrorCount++
continue
}
if err != nil {
logsMutex.Unlock()
Logger.Printf("Error creating file %s: %v", newMonthPath, err)
+ progress.ErrorCount++
continue
}
file.Close()
logsMutex.Unlock()
Logger.Printf("Error encoding month data for %s: %v", newMonthPath, err)
+ progress.ErrorCount++
continue
}
logsMutex.Unlock()
processedMonths++
+ time.Sleep(20 * time.Millisecond) // Simulate some delay for migration
+ }
+
+ // Final progress update
+ progress.ProcessedItems = processedMonths
+ if progressChan != nil {
+ progressChan <- *progress
}
return nil
}
-func migrateFiles(oldFilesDir, newDir string, oldKey string, newKey string, progressChan chan<- MigrationProgress) error {
+func migrateFiles(oldFilesDir, newDir string, oldKey string, newKey string, progress *MigrationProgress, progressChan chan<- MigrationProgress) error {
// Check if old files directory exists
filesMutex.RLock()
_, err := os.Stat(oldFilesDir)
filesMutex.Lock()
if err := os.MkdirAll(newFilesDir, 0755); err != nil {
filesMutex.Unlock()
+ progress.ErrorCount++
return fmt.Errorf("error creating new files directory: %v", err)
}
filesMutex.Unlock()
// Convert oldKey from base64 to []byte for decryption
oldKeyBytes, err := base64.URLEncoding.DecodeString(oldKey)
if err != nil {
+ progress.ErrorCount++
return fmt.Errorf("error decoding oldKey: %v", err)
}
yearEntries, err := os.ReadDir(newDir)
logsMutex.RUnlock()
if err != nil {
+ progress.ErrorCount++
return fmt.Errorf("error reading new user directory: %v", err)
}
monthEntries, err := os.ReadDir(yearPath)
logsMutex.RUnlock()
if err != nil {
+ progress.ErrorCount++
Logger.Printf("Error reading year directory %s: %v", yearPath, err)
continue
}
logsMutex.RUnlock()
if err != nil {
Logger.Printf("Error reading month file %s: %v", monthPath, err)
+ progress.ErrorCount++
continue
}
var monthData map[string]any
if err := json.Unmarshal(monthBytes, &monthData); err != nil {
Logger.Printf("Error parsing month data %s: %v", monthPath, err)
+ progress.ErrorCount++
continue
}
totalFiles := len(fileRefs)
Logger.Printf("Found %d files to migrate", totalFiles)
- if totalFiles == 0 {
- if progressChan != nil {
- progressChan <- MigrationProgress{
- Phase: "migrating_files",
- ProcessedItems: 0,
- TotalItems: 0,
- }
- }
- return nil // No files to migrate
- }
+ progress.Phase = "migrating_files"
+ progress.ProcessedItems = 0
+ progress.TotalItems = totalFiles
if progressChan != nil {
- progressChan <- MigrationProgress{
- Phase: "migrating_files",
- ProcessedItems: 0,
- TotalItems: totalFiles,
- }
+ progressChan <- *progress // Send initial progress update
+ }
+
+ if totalFiles == 0 {
+ return nil // No files to migrate
}
// Second pass: migrate each file
fileIDMap := make(map[string]string) // Map original file IDs to new file IDs
for i, fileRef := range fileRefs {
- // Update progress occasionally
- if progressChan != nil && (i%5 == 0 || i == 0) {
- progressChan <- MigrationProgress{
- Phase: "migrating_files",
- ProcessedItems: processedFiles,
- TotalItems: totalFiles,
- }
+ progress.ProcessedItems = processedFiles
+ if progressChan != nil {
+ progressChan <- *progress // Send progress update
}
// Check if we already have a mapping for this file ID
NewUUID, err := GenerateUUID()
if err != nil {
Logger.Printf("Error generating UUID for file %s: %v", fileRef.OrigUUID, err)
+ progress.ErrorCount++
continue
}
filesMutex.RUnlock()
if err != nil {
Logger.Printf("Error reading old file %s: %v", oldFilePath, err)
+ progress.ErrorCount++
continue
}
- // Decrypt file with old key - der Dateiinhalt ist bereits ein Fernet-Token
+ // Decrypt file with old key - the file content is already a Fernet token
plaintext, err := FernetDecrypt(string(oldFileBytes), oldKeyBytes)
if err != nil {
Logger.Printf("Error decrypting file %s: %v", fileRef.OrigUUID, err)
+ progress.ErrorCount++
continue
}
newEncrypted, err := EncryptFile(plaintextBytes, newKey)
if err != nil {
Logger.Printf("Error encrypting file %s: %v", fileRef.OrigUUID, err)
+ progress.ErrorCount++
continue
}
filesMutex.Unlock()
if err != nil {
Logger.Printf("Error writing new file %s: %v", newFilePath, err)
+ progress.ErrorCount++
continue
}
processedFiles++
+
+ // Update progress occasionally
+ if i%5 == 0 || i == len(fileRefs)-1 {
+ progress.ProcessedItems = processedFiles
+ if progressChan != nil {
+ progressChan <- *progress
+ }
+ }
+ time.Sleep(100 * time.Millisecond) // Simulate some delay for migration
}
// Third pass: update all month files with new file IDs
logsMutex.RUnlock()
if err != nil {
Logger.Printf("Error reading month file %s: %v", monthPath, err)
+ progress.ErrorCount++
continue
}
var monthData map[string]any
if err := json.Unmarshal(monthBytes, &monthData); err != nil {
Logger.Printf("Error parsing month data %s: %v", monthPath, err)
+ progress.ErrorCount++
continue
}
// If we have a mapping for this file UUID, update it
if newID, exists := fileIDMap[fileUUID]; exists {
- // Entferne das alte Format und ersetze es durch das neue Format
+ // Remove the old format and replace it with the new format
delete(file, "id")
file["uuid_filename"] = newID
- // Finde die korrekte Größe für diese Datei
+ // Find the correct size for this file
var fileSize uint64
for _, ref := range fileRefs {
if ref.OrigUUID == fileUUID {
plainName, err = FernetDecrypt(encName, oldKeyBytes)
if err != nil {
Logger.Printf("Error decrypting filename for %s: %v", fileUUID, err)
+ progress.ErrorCount++
continue
}
newEncName, err = EncryptText(plainName, newKey)
if err != nil {
Logger.Printf("Error encrypting filename for %s: %v", fileUUID, err)
+ progress.ErrorCount++
continue
}
if err != nil {
logsMutex.Unlock()
Logger.Printf("Error creating file %s: %v", monthPath, err)
+ progress.ErrorCount++
continue
}
file.Close()
logsMutex.Unlock()
Logger.Printf("Error encoding month data for %s: %v", monthPath, err)
+ progress.ErrorCount++
continue
}
}
// Final progress update
+ progress.ProcessedItems = processedFiles
if progressChan != nil {
- progressChan <- MigrationProgress{
- Phase: "migrating_files",
- ProcessedItems: processedFiles,
- TotalItems: totalFiles,
- }
+ progressChan <- *progress
}
Logger.Printf("Completed migrating %d/%d files", processedFiles, totalFiles)
package utils
import (
+	"crypto/cipher"
	"crypto/rand"
	"encoding/base64"
	"fmt"
+	"io"
	"github.com/golang-jwt/jwt/v5"
	"github.com/google/uuid"
	"golang.org/x/crypto/argon2"
+	"golang.org/x/crypto/chacha20poly1305"
)
// Claims represents the JWT claims
return encodedUUID, nil
}
+
+// CreateAEAD creates an AEAD cipher for encryption/decryption.
+// The key must be exactly chacha20poly1305.KeySize (32) bytes;
+// chacha20poly1305.New returns an error for any other length.
+func CreateAEAD(key []byte) (cipher.AEAD, error) {
+	return chacha20poly1305.New(key)
+}
+
+// EncryptText encrypts text with ChaCha20-Poly1305 under the provided
+// base64url-encoded key and returns base64url(nonce || ciphertext).
+func EncryptText(text, key string) (string, error) {
+	// The key arrives base64url-encoded; recover the raw bytes first.
+	rawKey, err := base64.URLEncoding.DecodeString(key)
+	if err != nil {
+		return "", fmt.Errorf("error decoding key: %v", err)
+	}
+
+	aead, err := CreateAEAD(rawKey)
+	if err != nil {
+		return "", fmt.Errorf("error creating cipher: %v", err)
+	}
+
+	// A fresh random nonce is prepended to the sealed data so that
+	// DecryptText can split it off again.
+	nonce := make([]byte, aead.NonceSize())
+	if _, err := io.ReadFull(rand.Reader, nonce); err != nil {
+		return "", fmt.Errorf("error creating nonce: %v", err)
+	}
+
+	sealed := aead.Seal(nonce, nonce, []byte(text), nil)
+	return base64.URLEncoding.EncodeToString(sealed), nil
+}
+
+// DecryptText reverses EncryptText: it base64url-decodes the input,
+// splits off the nonce prefix and opens the remainder with
+// ChaCha20-Poly1305 under the given base64url-encoded key.
+func DecryptText(ciphertext, key string) (string, error) {
+	rawKey, err := base64.URLEncoding.DecodeString(key)
+	if err != nil {
+		return "", fmt.Errorf("error decoding key: %v", err)
+	}
+
+	raw, err := base64.URLEncoding.DecodeString(ciphertext)
+	if err != nil {
+		return "", fmt.Errorf("error decoding ciphertext: %v", err)
+	}
+
+	aead, err := CreateAEAD(rawKey)
+	if err != nil {
+		return "", fmt.Errorf("error creating cipher: %v", err)
+	}
+
+	// The nonce was prepended during encryption; anything shorter than
+	// a full nonce cannot be valid.
+	if len(raw) < aead.NonceSize() {
+		return "", fmt.Errorf("ciphertext too short")
+	}
+	nonce, sealed := raw[:aead.NonceSize()], raw[aead.NonceSize():]
+
+	plaintext, err := aead.Open(nil, nonce, sealed, nil)
+	if err != nil {
+		return "", fmt.Errorf("error decrypting ciphertext: %v", err)
+	}
+
+	return string(plaintext), nil
+}
+
+// EncryptFile encrypts raw file bytes with ChaCha20-Poly1305 under the
+// provided base64url-encoded key, returning nonce || ciphertext (binary,
+// not base64 — unlike EncryptText).
+func EncryptFile(data []byte, key string) ([]byte, error) {
+	rawKey, err := base64.URLEncoding.DecodeString(key)
+	if err != nil {
+		return nil, fmt.Errorf("error decoding key: %v", err)
+	}
+
+	aead, err := CreateAEAD(rawKey)
+	if err != nil {
+		return nil, fmt.Errorf("error creating cipher: %v", err)
+	}
+
+	// Prepend a fresh random nonce so DecryptFile can recover it.
+	nonce := make([]byte, aead.NonceSize())
+	if _, err := io.ReadFull(rand.Reader, nonce); err != nil {
+		return nil, fmt.Errorf("error creating nonce: %v", err)
+	}
+
+	return aead.Seal(nonce, nonce, data, nil), nil
+}
+
+// DecryptFile reverses EncryptFile: it splits the nonce prefix off the
+// binary input and opens the remainder with ChaCha20-Poly1305 under the
+// given base64url-encoded key.
+func DecryptFile(ciphertext []byte, key string) ([]byte, error) {
+	rawKey, err := base64.URLEncoding.DecodeString(key)
+	if err != nil {
+		return nil, fmt.Errorf("error decoding key: %v", err)
+	}
+
+	aead, err := CreateAEAD(rawKey)
+	if err != nil {
+		return nil, fmt.Errorf("error creating cipher: %v", err)
+	}
+
+	// The nonce was prepended during encryption.
+	if len(ciphertext) < aead.NonceSize() {
+		return nil, fmt.Errorf("ciphertext too short")
+	}
+	nonce, sealed := ciphertext[:aead.NonceSize()], ciphertext[aead.NonceSize():]
+
+	plaintext, err := aead.Open(nil, nonce, sealed, nil)
+	if err != nil {
+		return nil, fmt.Errorf("error decrypting ciphertext: %v", err)
+	}
+
+	return plaintext, nil
+}
+
+// GetEncryptionKey retrieves the encryption key for a specific user.
+//
+// It looks the user up by numeric user_id in the users store, decrypts the
+// stored "enc_enc_key" with the caller-supplied derived key
+// (ChaCha20-Poly1305, nonce prepended to the ciphertext) and returns the
+// recovered key re-encoded with base64.URLEncoding.
+//
+// NOTE(review): stored values (derivedKey, enc_enc_key) are decoded with
+// base64.StdEncoding while the returned key uses base64.URLEncoding —
+// presumably to match EncryptText/DecryptText; confirm against the code
+// that writes users.json.
+func GetEncryptionKey(userID int, derivedKey string) (string, error) {
+	// Get users
+	users, err := GetUsers()
+	if err != nil {
+		return "", fmt.Errorf("error retrieving users: %v", err)
+	}
+
+	// Find user
+	usersList, ok := users["users"].([]any)
+	if !ok {
+		return "", fmt.Errorf("users.json is not in the correct format")
+	}
+
+	for _, u := range usersList {
+		user, ok := u.(map[string]any)
+		if !ok {
+			continue
+		}
+
+		// JSON numbers unmarshal as float64, hence the float64 assertion.
+		if id, ok := user["user_id"].(float64); ok && int(id) == userID {
+			encEncKey, ok := user["enc_enc_key"].(string)
+			if !ok {
+				return "", fmt.Errorf("user data is not in the correct format")
+			}
+
+			// Decode derived key
+			derivedKeyBytes, err := base64.StdEncoding.DecodeString(derivedKey)
+			if err != nil {
+				return "", fmt.Errorf("error decoding derived key: %v", err)
+			}
+
+			// Create AEAD cipher (ChaCha20-Poly1305 — not Fernet)
+			aead, err := CreateAEAD(derivedKeyBytes)
+			if err != nil {
+				return "", fmt.Errorf("error creating cipher: %v", err)
+			}
+
+			// Decode encrypted key
+			encEncKeyBytes, err := base64.StdEncoding.DecodeString(encEncKey)
+			if err != nil {
+				return "", fmt.Errorf("error decoding encrypted key: %v", err)
+			}
+
+			// Extract nonce from encrypted key (it was prepended at encryption time)
+			if len(encEncKeyBytes) < aead.NonceSize() {
+				return "", fmt.Errorf("encrypted key too short")
+			}
+			nonce, encKeyBytes := encEncKeyBytes[:aead.NonceSize()], encEncKeyBytes[aead.NonceSize():]
+
+			// Decrypt key
+			keyBytes, err := aead.Open(nil, nonce, encKeyBytes, nil)
+			if err != nil {
+				return "", fmt.Errorf("error decrypting key: %v", err)
+			}
+
+			// Return base64-encoded key
+			return base64.URLEncoding.EncodeToString(keyBytes), nil
+		}
+	}
+
+	return "", fmt.Errorf("user not found")
+}
let registration_allowed = $state(true);
+	// Ordered list of the server-reported migration phases; the index of a
+	// phase doubles as the UI step number rendered below the login form.
+	let migration_phases = $state([
+		'creating_new_user',
+		'migrating_templates',
+		'migrating_logs',
+		'migrating_files',
+		'completed'
+	]);
+
+	// Latest progress snapshot polled from the server.
+	let migration_phase = $state('');
+	let migration_progress_total = $state(0);
+	let migration_progress = $state(0);
+	let migration_error_count = $state(0);
+
+	// Index of the current phase within migration_phases (-1 while unknown).
+	let active_phase = $derived(
+		// find the current phase in migration_phases
+		migration_phases.indexOf(migration_phase)
+	);
+
onMount(() => {
// if params error=440 or error=401, show toast
if (window.location.search.includes('error=440')) {
});
}
+ function handleMigrationProgress(username) {
+ // Poll the server for migration progress
+ const interval = setInterval(() => {
+ axios
+ .get(API_URL + '/users/migrationProgress', { params: { username } })
+ .then((response) => {
+ const progress = response.data.progress;
+ if (progress) {
+ migration_phase = progress.phase;
+ migration_progress_total = progress.total_items;
+ migration_progress = progress.processed_items;
+
+ // Stop polling when migration is complete
+ if (progress.phase === 'completed') {
+ console.log('Migration completed successfully');
+ is_migrating = false;
+ migration_error_count = progress.error_count;
+ clearInterval(interval);
+ }
+ }
+
+ if (
+ !response.data.migration_in_progress &&
+ !response.data.progress.phase === 'not_started'
+ ) {
+ console.log('Migration stopped');
+ is_migrating = false;
+ clearInterval(interval);
+ }
+ })
+ .catch((error) => {
+ console.error('Error fetching migration progress:', error);
+ clearInterval(interval); // Stop polling on error
+ });
+ }, 500); // Poll every 500ms
+ }
+
function handleLogin(event) {
event.preventDefault();
}
is_logging_in = true;
+ console.log(API_URL);
axios
.post(API_URL + '/users/login', { username, password })
.then((response) => {
if (response.data.migration_started) {
is_migrating = true;
+
+ handleMigrationProgress(response.data.username);
} else {
localStorage.setItem('user', JSON.stringify(response.data.username));
goto('/write');
}
})
.catch((error) => {
+ console.log(error);
if (error.response.status === 404) {
show_login_failed = true;
}
/>
<label for="loginPassword">Password</label>
</div>
- {#if is_migrating}
+ {#if is_migrating || migration_phase == 'completed'}
<div class="alert alert-info" role="alert">
- Daten-Migration wurde gestartet. Dies kann einige Minuten dauern.<br />
+ Daten-Migration wurde gestartet. Dies kann einige Momente dauern.<br />
<div class="text-bg-danger p-2 my-2 rounded">
Währenddessen die Seite nicht neu laden und nicht neu einloggen!
</div>
Fortschritt:
- <br />
+ <div class="progress-item {active_phase >= 0 ? 'active' : ''}">
+ <div class="d-flex">
+ <div class="emoji">
+ {#if active_phase <= 0}
+ ➡️
+ {:else}
+ ✅
+ {/if}
+ </div>
+ Account anlegen
+ </div>
+ </div>
+ <div class="progress-item {active_phase >= 1 ? 'active' : ''}">
+ <div class="d-flex">
+ <div class="emoji">
+ {#if active_phase <= 1}
+ ➡️
+ {:else}
+ ✅
+ {/if}
+ </div>
+ Vorlagen migrieren
+ </div>
+ </div>
+ <div class="progress-item {active_phase >= 2 ? 'active' : ''}">
+ <div class="d-flex">
+ <div class="emoji">
+ {#if active_phase <= 2}
+ ➡️
+ {:else}
+ ✅
+ {/if}
+ </div>
+ Logs migrieren
+ </div>
+
+ {#if active_phase === 2}
+ <div
+ class="progress"
+ role="progressbar"
+ aria-label="Progress"
+ aria-valuenow="0"
+ aria-valuemin="0"
+ aria-valuemax="100"
+ >
+ <div
+ class="progress-bar"
+ style="width: {(migration_progress / migration_progress_total) * 100}%"
+ >
+ {migration_progress}/{migration_progress_total}
+ </div>
+ </div>
+ {/if}
+ </div>
+ <div class="progress-item {active_phase >= 3 ? 'active' : ''}">
+ <div class="d-flex">
+ <div class="emoji">
+ {#if active_phase <= 3}
+ ➡️
+ {:else}
+ ✅
+ {/if}
+ </div>
+ Dateien migrieren
+ </div>
+ {#if active_phase === 3}
+ <div
+ class="progress"
+ role="progressbar"
+ aria-label="Progress"
+ aria-valuenow="0"
+ aria-valuemin="0"
+ aria-valuemax="100"
+ >
+ <div
+ class="progress-bar"
+ style="width: {(migration_progress / migration_progress_total) * 100}%"
+ >
+ {migration_progress}/{migration_progress_total}
+ </div>
+ </div>
+ {/if}
+ </div>
+ {#if migration_phase === 'completed'}
+ {#if migration_error_count == 0}
+ <div class="text-bg-success p-2 my-2 rounded">
+ Migration wurde ohne erkannte Fehler abgeschlossen! Bitte Login erneut
+ starten. <br />
+ Prüfen Sie anschließend, ob alle Daten korrekt migriert wurden.
+ </div>
+ {:else}
+ <div class="text-bg-warning p-2 my-2 rounded">
+ Migration wurde mit {migration_error_count} erkannten Fehlern abgeschlossen!
+ Prüfen Sie die Server-Logs für Details!<br />
+ Falls der Login nicht funktioniert, oder die Daten fehlerhaft sind, so müssen
+ die migrierten Daten händisch entfernt werden.
+ </div>
+ {/if}
+ {/if}
</div>
{/if}
{#if show_login_failed}
{/if}
<div class="d-flex justify-content-center">
<button type="submit" class="btn btn-primary" disabled={is_logging_in}>
- {#if is_logging_in}
+ {#if is_logging_in || is_migrating}
<div class="spinner-border spinner-border-sm" role="status">
<span class="visually-hidden">Loading...</span>
</div>
</div>
<style>
+ .progress-item {
+ opacity: 0.5;
+ }
+
+ .progress-item.active {
+ opacity: 1;
+ }
+
+ .progress-item .emoji {
+ visibility: hidden;
+ }
+
+ .progress-item.active .emoji {
+ visibility: visible;
+ }
+
.logo-wrapper {
width: 50%;
}