began code for migration and other fixes
author    PhiTux <redacted>
Fri, 11 Jul 2025 16:49:09 +0000 (18:49 +0200)
committer PhiTux <redacted>
Fri, 11 Jul 2025 16:49:09 +0000 (18:49 +0200)
12 files changed:
backend/.gitignore
backend/backend [deleted file]
backend/go.mod
backend/handlers/logs.go
backend/handlers/users.go
backend/main.go
backend/middleware/middleware.go
backend/utils/file_handling.go
backend/utils/helpers.go [new file with mode: 0644]
backend/utils/migration.go [new file with mode: 0644]
backend/utils/security.go
frontend/src/routes/login/+page.svelte

diff --git a/backend/.gitignore b/backend/.gitignore
index c3190e3272cf256f5c28c5ad7983da5adb6cff41..e9d753f8cba0f70cbca275b3f1743d686d839d0d 100644 (file)
@@ -1,2 +1,4 @@
 .envrc
-data/
\ No newline at end of file
+data/
+data.bak/
+backend
\ No newline at end of file
diff --git a/backend/backend b/backend/backend
deleted file mode 100755 (executable)
index e3261cb..0000000
Binary files a/backend/backend and /dev/null differ
diff --git a/backend/go.mod b/backend/go.mod
index d2211d1b47f5e8eaf6f2f33451c0d1e12ada014e..24f2a559aa6e50dde8a090dc011c41f71fb1571f 100644 (file)
@@ -1,6 +1,6 @@
 module github.com/phitux/dailytxt/backend
 
-go 1.22
+go 1.24
 
 require (
        github.com/golang-jwt/jwt/v5 v5.2.0
diff --git a/backend/handlers/logs.go b/backend/handlers/logs.go
index 1eda899870929407c6b51d2c72974a5cb6a190ec..e014e08db299af704a822c18ece3ea1bb59f1515 100644 (file)
@@ -738,12 +738,12 @@ func GetOnThisDay(w http.ResponseWriter, r *http.Request) {
        }
 
        // Get parameters from URL
-       month, err := strconv.Atoi(r.PathValue("month"))
+       month, err := strconv.Atoi(r.URL.Query().Get("month"))
        if err != nil {
                http.Error(w, "Invalid month parameter", http.StatusBadRequest)
                return
        }
-       day, err := strconv.Atoi(r.PathValue("day"))
+       day, err := strconv.Atoi(r.URL.Query().Get("day"))
        if err != nil {
                http.Error(w, "Invalid day parameter", http.StatusBadRequest)
                return
@@ -897,7 +897,7 @@ func Search(w http.ResponseWriter, r *http.Request) {
        }
 
        // Get query parameter
-       searchString := r.URL.Query().Get("q")
+       searchString := r.URL.Query().Get("searchString")
        if searchString == "" {
                http.Error(w, "Missing search parameter", http.StatusBadRequest)
                return
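
The hunks above move GetOnThisDay from path values to query parameters and rename the search parameter from "q" to "searchString". A minimal, self-contained sketch of the query-parameter style, driven through net/http/httptest; the route pattern is assumed from the handler above, not taken from main.go:

    package main

    import (
        "fmt"
        "net/http"
        "net/http/httptest"
        "strconv"
    )

    func main() {
        mux := http.NewServeMux()
        // The handler now expects /logs/getOnThisDay?month=7&day=11
        // instead of a /logs/getOnThisDay/{month}/{day} pattern.
        mux.HandleFunc("GET /logs/getOnThisDay", func(w http.ResponseWriter, r *http.Request) {
            month, err := strconv.Atoi(r.URL.Query().Get("month"))
            if err != nil {
                http.Error(w, "Invalid month parameter", http.StatusBadRequest)
                return
            }
            day, err := strconv.Atoi(r.URL.Query().Get("day"))
            if err != nil {
                http.Error(w, "Invalid day parameter", http.StatusBadRequest)
                return
            }
            fmt.Fprintf(w, "month=%d day=%d\n", month, day)
        })

        rec := httptest.NewRecorder()
        mux.ServeHTTP(rec, httptest.NewRequest("GET", "/logs/getOnThisDay?month=7&day=11", nil))
        fmt.Print(rec.Body.String()) // month=7 day=11
    }
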
diff --git a/backend/handlers/users.go b/backend/handlers/users.go
index 448e454366b1e356ba1f9d2adddd04861ee6c3b3..460e1afa56bb599d8f06b815376ab0b84d6160ad 100644 (file)
@@ -5,6 +5,7 @@ import (
        "encoding/json"
        "fmt"
        "net/http"
+       "sync"
        "time"
 
        "github.com/phitux/dailytxt/backend/utils"
@@ -36,8 +37,8 @@ func Login(w http.ResponseWriter, r *http.Request) {
        usersList, ok := users["users"].([]any)
        if !ok || len(usersList) == 0 {
                utils.Logger.Printf("Login failed. User '%s' not found", req.Username)
-               http.Error(w, "User/Password combination not found", http.StatusNotFound)
-               return
+               /* http.Error(w, "User/Password combination not found", http.StatusNotFound)
+               return */
        }
 
        // Find user
@@ -68,8 +69,123 @@ func Login(w http.ResponseWriter, r *http.Request) {
        }
 
        if !found {
-               utils.Logger.Printf("Login failed. User '%s' not found", req.Username)
-               http.Error(w, "User/Password combination not found", http.StatusNotFound)
+               // Try to find user in old data
+               oldUsers, err := utils.GetOldUsers()
+               if err != nil {
+                       utils.Logger.Printf("Error accessing old users: %v", err)
+                       http.Error(w, "User/Password combination not found", http.StatusNotFound)
+                       return
+               }
+
+               oldUsersList, ok := oldUsers["users"].([]interface{})
+               if !ok || len(oldUsersList) == 0 {
+                       utils.Logger.Printf("Login failed. User '%s' not found in new or old data", req.Username)
+                       http.Error(w, "User/Password combination not found", http.StatusNotFound)
+                       return
+               }
+
+               // Find user in old data
+               var oldUser map[string]interface{}
+               for _, u := range oldUsersList {
+                       user, ok := u.(map[string]interface{})
+                       if !ok {
+                               continue
+                       }
+
+                       if username, ok := user["username"].(string); ok && username == req.Username {
+                               oldUser = user
+                               break
+                       }
+               }
+
+               if oldUser == nil {
+                       utils.Logger.Printf("Login failed. User '%s' not found in new or old data", req.Username)
+                       http.Error(w, "User/Password combination not found", http.StatusNotFound)
+                       return
+               }
+
+               // Get password
+               oldHashedPassword, ok := oldUser["password"].(string)
+               if !ok {
+                       utils.Logger.Printf("Login failed. Password not found for '%s'", req.Username)
+                       http.Error(w, "User/Password combination not found", http.StatusNotFound)
+                       return
+               }
+
+               // Verify old password
+               if !utils.VerifyOldPassword(req.Password, oldHashedPassword) {
+                       utils.Logger.Printf("Login failed. Old password for user '%s' is incorrect", req.Username)
+                       http.Error(w, "User/Password combination not found", http.StatusNotFound)
+                       return
+               }
+
+               // Start migration
+               utils.Logger.Printf("User '%s' found in old data. Starting migration...", req.Username)
+
+               // Check if there is already a migration in progress for this user
+               activeMigrationsMutex.RLock()
+               isActive := activeMigrations[req.Username]
+               activeMigrationsMutex.RUnlock()
+
+               if isActive {
+                       utils.Logger.Printf("Migration already in progress for user '%s'. Rejecting second attempt.", req.Username)
+                       utils.JSONResponse(w, http.StatusConflict, map[string]any{
+                               "error": "Migration already in progress for this user. Please wait until it completes.",
+                       })
+                       return
+               }
+
+               // Mark this user as having an active migration
+               activeMigrationsMutex.Lock()
+               activeMigrations[req.Username] = true
+               activeMigrationsMutex.Unlock()
+
+               // Create a channel to report progress
+               progressChan := make(chan utils.MigrationProgress, 10)
+
+               // Start migration in a goroutine
+               go func() {
+                       defer close(progressChan)
+
+                       // Update progress channel to track migration progress
+                       go func() {
+                               for progress := range progressChan {
+                                       migrationProgressMutex.Lock()
+                                       // Convert from utils.MigrationProgress to handlers.MigrationProgress
+                                       migrationProgress[req.Username] = MigrationProgress{
+                                               Phase:          progress.Phase,
+                                               CurrentItem:    progress.CurrentItem,
+                                               ProcessedItems: progress.ProcessedItems,
+                                               TotalItems:     progress.TotalItems,
+                                       }
+                                       migrationProgressMutex.Unlock()
+                               }
+                       }()
+
+                       err := utils.MigrateUserData(req.Username, req.Password, progressChan)
+                       if err != nil {
+                               utils.Logger.Printf("Migration failed for user '%s': %v", req.Username, err)
+                               // Mark migration as completed even on error
+                               activeMigrationsMutex.Lock()
+                               activeMigrations[req.Username] = false
+                               activeMigrationsMutex.Unlock()
+                               return
+                       }
+
+                       utils.Logger.Printf("Migration completed for user '%s'", req.Username)
+
+                       // Mark migration as completed
+                       activeMigrationsMutex.Lock()
+                       activeMigrations[req.Username] = false
+                       activeMigrationsMutex.Unlock()
+               }()
+
+               // Return migration status to client
+               utils.JSONResponse(w, http.StatusAccepted, map[string]interface{}{
+                       "migration_started": true,
+                       "username":          req.Username,
+                       "message":           "User found in old data. Migration started. Please wait and retry login in a few moments.",
+               })
                return
        }
 
@@ -111,6 +227,13 @@ func Login(w http.ResponseWriter, r *http.Request) {
        })
 }
 
+func IsRegistrationAllowed(w http.ResponseWriter, r *http.Request) {
+       // Check if registration is allowed
+       utils.JSONResponse(w, http.StatusOK, map[string]bool{
+               "registration_allowed": utils.Settings.AllowRegistration,
+       })
+}
+
 // RegisterRequest represents the register request body
 type RegisterRequest struct {
        Username string `json:"username"`
@@ -455,3 +578,99 @@ func SaveUserSettings(w http.ResponseWriter, r *http.Request) {
                "success": true,
        })
 }
+
+// MigrationProgress stores the progress of user data migration
+type MigrationProgress struct {
+       Phase          string `json:"phase"`           // Current migration phase
+       CurrentItem    string `json:"current_item"`    // Current item being migrated
+       ProcessedItems int    `json:"processed_items"` // Number of items processed
+       TotalItems     int    `json:"total_items"`     // Total number of items to process
+}
+
+// migrationProgress keeps track of migration progress for all users
+var migrationProgress = make(map[string]MigrationProgress)
+var migrationProgressMutex sync.Mutex
+var activeMigrations = make(map[string]bool)
+var activeMigrationsMutex sync.RWMutex
+
+// CheckMigrationProgress checks the progress of a user migration
+func CheckMigrationProgress(w http.ResponseWriter, r *http.Request) {
+       // Get username from query parameters
+       username := r.URL.Query().Get("username")
+       if username == "" {
+               http.Error(w, "Username is required", http.StatusBadRequest)
+               return
+       }
+
+       // Get progress
+       migrationProgressMutex.Lock()
+       progress, exists := migrationProgress[username]
+       migrationProgressMutex.Unlock()
+
+       if !exists {
+               utils.JSONResponse(w, http.StatusOK, map[string]interface{}{
+                       "progress": 0,
+                       "status":   "not_started",
+               })
+               return
+       }
+
+       // Return progress
+       status := "in_progress"
+       if progress.TotalItems > 0 && progress.ProcessedItems >= progress.TotalItems {
+               status = "completed"
+       }
+
+       utils.JSONResponse(w, http.StatusOK, map[string]interface{}{
+               "progress": progress,
+               "status":   status,
+       })
+}
+
+// GetMigrationProgress returns the migration progress for a user
+func GetMigrationProgress(w http.ResponseWriter, r *http.Request) {
+       // Parse the request body
+       var req struct {
+               Username string `json:"username"`
+       }
+       if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
+               http.Error(w, "Invalid request body", http.StatusBadRequest)
+               return
+       }
+
+       // Get migration progress
+       migrationProgressMutex.Lock()
+       progress, exists := migrationProgress[req.Username]
+       migrationProgressMutex.Unlock()
+
+       // Check if migration is actually active
+       activeMigrationsMutex.RLock()
+       isActive := activeMigrations[req.Username]
+       activeMigrationsMutex.RUnlock()
+
+       if !exists {
+               utils.JSONResponse(w, http.StatusOK, map[string]interface{}{
+                       "migration_in_progress": false,
+                       "status":                "not_started",
+               })
+               return
+       }
+
+       // Check if migration is completed
+       migrationCompleted := progress.Phase == "completed" || (progress.ProcessedItems >= progress.TotalItems && progress.TotalItems > 0)
+
+       // Return progress
+       status := "in_progress"
+       if migrationCompleted {
+               status = "completed"
+       } else if !isActive {
+               // If migration is not active but not completed, it might have failed
+               status = "failed"
+       }
+
+       utils.JSONResponse(w, http.StatusOK, map[string]interface{}{
+               "migration_in_progress": isActive && !migrationCompleted,
+               "progress":              progress,
+               "status":                status,
+       })
+}
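
The migration kicked off in Login above uses a common Go pattern: the migration goroutine sends MigrationProgress values into a buffered channel, a second goroutine mirrors them into a mutex-protected map, and the progress handlers read that map. A self-contained sketch of the same pattern with hypothetical names rather than the project's types:

    package main

    import (
        "fmt"
        "sync"
        "time"
    )

    type progress struct {
        Processed, Total int
    }

    var (
        progressByUser = make(map[string]progress)
        progressMu     sync.Mutex
    )

    func main() {
        ch := make(chan progress, 10)

        // Consumer: mirror channel updates into the shared map.
        go func() {
            for p := range ch {
                progressMu.Lock()
                progressByUser["alice"] = p
                progressMu.Unlock()
            }
        }()

        // Producer: the "migration" reports progress and closes the channel when done.
        go func() {
            defer close(ch)
            for i := 1; i <= 5; i++ {
                time.Sleep(50 * time.Millisecond)
                ch <- progress{Processed: i, Total: 5}
            }
        }()

        // Poller: roughly what a /users/migrationProgress request would read.
        for i := 0; i < 6; i++ {
            time.Sleep(60 * time.Millisecond)
            progressMu.Lock()
            fmt.Printf("progress: %+v\n", progressByUser["alice"])
            progressMu.Unlock()
        }
    }
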
diff --git a/backend/main.go b/backend/main.go
index d9c13ba82af88d4197c7807b403c2e50bf70a10a..66e1d4600188a8552c4d75ac23b3d9bc021ca310 100644 (file)
@@ -24,16 +24,21 @@ func main() {
                logger.Fatalf("Failed to initialize settings: %v", err)
        }
 
+       // Check and handle old data migration if needed
+       utils.HandleOldData(logger)
+
        // Create a new router
        mux := http.NewServeMux()
 
        // Register routes
        mux.HandleFunc("POST /users/login", handlers.Login)
+       mux.HandleFunc("GET /users/isRegistrationAllowed", handlers.IsRegistrationAllowed)
        mux.HandleFunc("POST /users/register", handlers.Register)
        mux.HandleFunc("GET /users/logout", handlers.Logout)
        mux.HandleFunc("GET /users/check", middleware.RequireAuth(handlers.CheckLogin))
        mux.HandleFunc("GET /users/getUserSettings", middleware.RequireAuth(handlers.GetUserSettings))
        mux.HandleFunc("POST /users/saveUserSettings", middleware.RequireAuth(handlers.SaveUserSettings))
+       mux.HandleFunc("POST /users/migrationProgress", handlers.GetMigrationProgress)
 
        mux.HandleFunc("POST /logs/saveLog", middleware.RequireAuth(handlers.SaveLog))
        mux.HandleFunc("GET /logs/getLog", middleware.RequireAuth(handlers.GetLog))
@@ -56,8 +61,9 @@ func main() {
        mux.HandleFunc("GET /logs/getHistory", middleware.RequireAuth(handlers.GetHistory))
        mux.HandleFunc("GET /logs/bookmarkDay", middleware.RequireAuth(handlers.BookmarkDay))
 
-       // Create a handler with CORS middleware
-       handler := middleware.CORS(mux)
+       // Create a handler chain with Logger and CORS middleware
+       // Logger middleware will be executed first, then CORS
+       handler := middleware.Logger(middleware.CORS(mux))
 
        // Create the server
        server := &http.Server{
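
The new handler chain middleware.Logger(middleware.CORS(mux)) wraps the mux from the outside in, so Logger sees each request first and CORS second, as the comment above states. A small sketch with placeholder middleware showing why wrapping order equals execution order:

    package main

    import (
        "fmt"
        "net/http"
        "net/http/httptest"
    )

    func wrap(name string, next http.Handler) http.Handler {
        return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
            fmt.Println("enter", name)
            next.ServeHTTP(w, r)
            fmt.Println("leave", name)
        })
    }

    func main() {
        mux := http.NewServeMux()
        mux.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) {
            fmt.Println("handler")
        })

        handler := wrap("logger", wrap("cors", mux))
        handler.ServeHTTP(httptest.NewRecorder(), httptest.NewRequest("GET", "/", nil))
        // Output: enter logger, enter cors, handler, leave cors, leave logger
    }
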
diff --git a/backend/middleware/middleware.go b/backend/middleware/middleware.go
index a8721dfab854dadcb721dd9aef3d97a726b1e509..704403ca47c04a1607f79d374d9ee0692624ca81 100644 (file)
@@ -4,6 +4,7 @@ import (
        "context"
        "net/http"
        "strings"
+       "time"
 
        "github.com/phitux/dailytxt/backend/utils"
 )
@@ -73,6 +74,12 @@ func RequireAuth(next http.HandlerFunc) http.HandlerFunc {
 // Logger middleware logs all requests
 func Logger(next http.Handler) http.Handler {
        return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+               // If not in development mode, skip detailed logging
+               if !utils.Settings.Development {
+                       next.ServeHTTP(w, r)
+                       return
+               }
+
                // Skip logging for static files
                if strings.HasPrefix(r.URL.Path, "/static/") {
                        next.ServeHTTP(w, r)
@@ -80,9 +87,28 @@ func Logger(next http.Handler) http.Handler {
                }
 
                // Log request
-               utils.Logger.Printf("Request: %s %s", r.Method, r.URL.Path)
+               startTime := time.Now()
 
-               // Continue with next handler
-               next.ServeHTTP(w, r)
+               // Create a response writer wrapper to capture the status code
+               rw := &responseWriter{ResponseWriter: w, statusCode: http.StatusOK}
+
+               // Call the next handler with our custom response writer
+               next.ServeHTTP(rw, r)
+
+               // Log response
+               duration := time.Since(startTime)
+               utils.Logger.Printf("%s %s - Status: %d - Duration: %v", r.Method, r.URL.Path, rw.statusCode, duration)
        })
 }
+
+// responseWriter is a wrapper for http.ResponseWriter that captures the status code
+type responseWriter struct {
+       http.ResponseWriter
+       statusCode int
+}
+
+// WriteHeader captures the status code and delegates to the underlying ResponseWriter
+func (rw *responseWriter) WriteHeader(statusCode int) {
+       rw.statusCode = statusCode
+       rw.ResponseWriter.WriteHeader(statusCode)
+}
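
The responseWriter wrapper records the status because handlers call WriteHeader on the wrapper before it reaches the embedded writer; a handler that only calls Write never goes through the override, so the pre-initialised 200 remains. A quick standalone check of that behaviour with net/http/httptest (the wrapper is copied here for illustration, not imported from the middleware package):

    package main

    import (
        "fmt"
        "net/http"
        "net/http/httptest"
    )

    type responseWriter struct {
        http.ResponseWriter
        statusCode int
    }

    func (rw *responseWriter) WriteHeader(code int) {
        rw.statusCode = code
        rw.ResponseWriter.WriteHeader(code)
    }

    func main() {
        rec := httptest.NewRecorder()
        rw := &responseWriter{ResponseWriter: rec, statusCode: http.StatusOK}

        // NotFoundHandler calls WriteHeader(404), which the wrapper captures.
        http.NotFoundHandler().ServeHTTP(rw, httptest.NewRequest("GET", "/missing", nil))

        fmt.Println(rw.statusCode) // 404
    }
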
diff --git a/backend/utils/file_handling.go b/backend/utils/file_handling.go
index f8989dd210057b5b2b895dfdea4aa2ee1d5f4843..cc8b5bcce771a9c75eba9d7b4e5a75e7b4d37f92 100644 (file)
@@ -11,12 +11,22 @@ import (
        "path/filepath"
        "strconv"
        "strings"
+       "sync"
 
        "golang.org/x/crypto/chacha20poly1305"
 )
 
+// Mutexes for file access
+var (
+       usersFileMutex    sync.RWMutex // For users.json
+       userSettingsMutex sync.RWMutex // For user settings
+)
+
 // GetUsers retrieves the users from the users.json file
 func GetUsers() (map[string]any, error) {
+       usersFileMutex.RLock()
+       defer usersFileMutex.RUnlock()
+
        // Try to open the users.json file
        filePath := filepath.Join(Settings.DataPath, "users.json")
        file, err := os.Open(filePath)
@@ -46,6 +56,9 @@ func GetUsers() (map[string]any, error) {
 
 // WriteUsers writes the users to the users.json file
 func WriteUsers(content map[string]any) error {
+       usersFileMutex.Lock()
+       defer usersFileMutex.Unlock()
+
        // Create the users.json file
        filePath := filepath.Join(Settings.DataPath, "users.json")
        file, err := os.Create(filePath)
@@ -79,7 +92,6 @@ func GetMonth(userID int, year, month int) (map[string]any, error) {
        file, err := os.Open(filePath)
        if err != nil {
                if os.IsNotExist(err) {
-                       Logger.Printf("%s - File not found", filePath)
                        return map[string]any{}, nil
                }
                Logger.Printf("Error opening %s: %v", filePath, err)
@@ -207,6 +219,9 @@ func RandRead(b []byte) (int, error) {
 
 // GetUserSettings retrieves the settings for a specific user
 func GetUserSettings(userID int) (string, error) {
+       userSettingsMutex.RLock()
+       defer userSettingsMutex.RUnlock()
+
        // Try to open the settings.encrypted file
        filePath := filepath.Join(Settings.DataPath, fmt.Sprintf("%d/settings.encrypted", userID))
        file, err := os.Open(filePath)
@@ -232,6 +247,9 @@ func GetUserSettings(userID int) (string, error) {
 
 // WriteUserSettings writes the settings for a specific user
 func WriteUserSettings(userID int, content string) error {
+       userSettingsMutex.Lock()
+       defer userSettingsMutex.Unlock()
+
        // Create the directory if it doesn't exist
        dirPath := filepath.Join(Settings.DataPath, fmt.Sprintf("%d", userID))
        if err := os.MkdirAll(dirPath, 0755); err != nil {
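
The new sync.RWMutex guards serialize access to users.json and the per-user settings files: RLock admits any number of concurrent readers, while Lock gives a single writer exclusive access. A minimal illustration of that locking pattern, using an in-memory map as a stand-in for the real file I/O:

    package main

    import (
        "fmt"
        "sync"
    )

    var (
        usersFileMutex sync.RWMutex
        users          = map[string]any{"users": []any{}}
    )

    func getUsers() map[string]any {
        usersFileMutex.RLock() // shared: many readers may hold this at once
        defer usersFileMutex.RUnlock()
        return users
    }

    func writeUsers(u map[string]any) {
        usersFileMutex.Lock() // exclusive: blocks readers and other writers
        defer usersFileMutex.Unlock()
        users = u
    }

    func main() {
        var wg sync.WaitGroup
        for i := 0; i < 4; i++ {
            wg.Add(1)
            go func(i int) {
                defer wg.Done()
                writeUsers(map[string]any{"users": []any{i}})
                _ = getUsers()
            }(i)
        }
        wg.Wait()
        fmt.Println(getUsers())
    }
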
diff --git a/backend/utils/helpers.go b/backend/utils/helpers.go
new file mode 100644 (file)
index 0000000..991970c
--- /dev/null
@@ -0,0 +1,334 @@
+package utils
+
+import (
+       "encoding/json"
+       "fmt"
+       "io"
+       "log"
+       "net/http"
+       "os"
+)
+
+// Global logger
+var Logger *log.Logger
+
+func init() {
+       // Initialize logger
+       Logger = log.New(os.Stdout, "dailytxt: ", log.LstdFlags|log.Lmicroseconds|log.Lshortfile)
+}
+
+// ContextKey is a type for context keys
+type ContextKey string
+
+// Context keys
+const (
+       UserIDKey     ContextKey = "userID"
+       UsernameKey   ContextKey = "username"
+       DerivedKeyKey ContextKey = "derivedKey"
+)
+
+// AppSettings holds the application settings
+type AppSettings struct {
+       DataPath          string   `json:"data_path"`
+       Development       bool     `json:"development"`
+       SecretToken       string   `json:"secret_token"`
+       LogoutAfterDays   int      `json:"logout_after_days"`
+       AllowedHosts      []string `json:"allowed_hosts"`
+       Indent            int      `json:"indent"`
+       AllowRegistration bool     `json:"allow_registration"`
+}
+
+// Global settings
+var Settings AppSettings
+
+// InitSettings loads the application settings
+func InitSettings() error {
+       // Default settings
+       Settings = AppSettings{
+               DataPath:          "/data",
+               Development:       false,
+               SecretToken:       GenerateSecretToken(),
+               LogoutAfterDays:   30,
+               AllowedHosts:      []string{"http://localhost:5173", "http://127.0.0.1:5173"},
+               Indent:            0,
+               AllowRegistration: false,
+       }
+
+       fmt.Print("\nDetected following settings:\n================\n")
+
+       // Override with environment variables if available
+       if dataPath := os.Getenv("DATA_PATH"); dataPath != "" {
+               Settings.DataPath = dataPath
+       }
+       fmt.Printf("Data Path: %s\n", Settings.DataPath)
+
+       if os.Getenv("DEVELOPMENT") == "true" {
+               Settings.Development = true
+       }
+       fmt.Printf("Development Mode: %t\n", Settings.Development)
+
+       if secretToken := os.Getenv("SECRET_TOKEN"); secretToken != "" {
+               Settings.SecretToken = secretToken
+       }
+       fmt.Printf("Secret Token: %s\n", Settings.SecretToken)
+
+       if logoutDays := os.Getenv("LOGOUT_AFTER_DAYS"); logoutDays != "" {
+               // Parse logoutDays to int
+               var days int
+               if _, err := fmt.Sscanf(logoutDays, "%d", &days); err == nil {
+                       Settings.LogoutAfterDays = days
+               }
+       }
+       fmt.Printf("Logout After Days: %d\n", Settings.LogoutAfterDays)
+
+       if indent := os.Getenv("INDENT"); indent != "" {
+               // Parse indent to int
+               var ind int
+               if _, err := fmt.Sscanf(indent, "%d", &ind); err == nil {
+                       Settings.Indent = ind
+               }
+       }
+       fmt.Printf("Indent: %d\n", Settings.Indent)
+
+       if allowRegistration := os.Getenv("ALLOW_REGISTRATION"); allowRegistration != "" {
+               // Parse allowRegistration to bool
+               if allowRegistration == "true" {
+                       Settings.AllowRegistration = true
+               } else {
+                       Settings.AllowRegistration = false
+               }
+       }
+       fmt.Printf("Allow Registration: %t\n", Settings.AllowRegistration)
+
+       fmt.Print("================\n\n")
+
+       // Create data directory if it doesn't exist
+       if err := os.MkdirAll(Settings.DataPath, 0755); err != nil {
+               return fmt.Errorf("failed to create data directory: %v", err)
+       }
+
+       return nil
+}
+
+// JSONResponse sends a JSON response with the given status code and data
+func JSONResponse(w http.ResponseWriter, statusCode int, data any) {
+       // Set content type
+       w.Header().Set("Content-Type", "application/json")
+       w.WriteHeader(statusCode)
+
+       // Encode data to JSON
+       var encoder *json.Encoder
+       if Settings.Development && Settings.Indent > 0 {
+               encoder = json.NewEncoder(w)
+               encoder.SetIndent("", fmt.Sprintf("%*s", Settings.Indent, ""))
+       } else {
+               encoder = json.NewEncoder(w)
+       }
+
+       if err := encoder.Encode(data); err != nil {
+               Logger.Printf("Error encoding JSON response: %v", err)
+               http.Error(w, "Internal Server Error", http.StatusInternalServerError)
+       }
+}
+
+// Move data to directory "old", if users.json is from dailytxt version 1
+func HandleOldData(logger *log.Logger) {
+       // Check if users.json exists
+       usersFile := Settings.DataPath + "/users.json"
+       if _, err := os.Stat(usersFile); os.IsNotExist(err) {
+               logger.Println("No users.json found, skipping old data check.")
+               return
+       }
+
+       // Read the file
+       data, err := os.ReadFile(usersFile)
+       if err != nil {
+               logger.Printf("Error reading users.json: %v", err)
+               return
+       }
+
+       // Check if the file is from dailytxt version 1
+       var usersData map[string]interface{}
+       if err := json.Unmarshal(data, &usersData); err != nil {
+               logger.Printf("Error parsing users.json: %v", err)
+               return
+       }
+
+       // Check if users array exists
+       usersArray, ok := usersData["users"].([]interface{})
+       if !ok || len(usersArray) == 0 {
+               logger.Println("No users found in users.json, skipping migration.")
+               return
+       }
+
+       // Check if any user is missing the dailytxt_version=2 field
+       needsMigration := false
+       for _, userInterface := range usersArray {
+               user, ok := userInterface.(map[string]interface{})
+               if !ok {
+                       continue
+               }
+
+               // Check if the version field exists and is 2
+               version, exists := user["dailytxt_version"]
+               if !exists || version != float64(2) {
+                       needsMigration = true
+                       logger.Printf("Found user without dailytxt_version=2: %s", user["username"])
+                       break
+               }
+       }
+
+       // If no migration is needed, return
+       if !needsMigration {
+               logger.Println("All users have dailytxt_version=2, no migration needed.")
+               return
+       }
+
+       // Create "old" directory
+       oldDir := Settings.DataPath + "/old"
+       if err := os.MkdirAll(oldDir, 0755); err != nil {
+               logger.Printf("Error creating old directory: %v", err)
+               return
+       }
+
+       // Move all files from data to old
+       logger.Println("Moving all data to old directory...")
+
+       // List all files and directories in the data path
+       entries, err := os.ReadDir(Settings.DataPath)
+       if err != nil {
+               logger.Printf("Error reading data directory: %v", err)
+               return
+       }
+
+       for _, entry := range entries {
+               name := entry.Name()
+               // Skip the "old" directory itself
+               if name == "old" {
+                       continue
+               }
+
+               srcPath := Settings.DataPath + "/" + name
+               destPath := oldDir + "/" + name
+
+               // Check if it's a directory or file
+               info, err := os.Stat(srcPath)
+               if err != nil {
+                       logger.Printf("Error getting info for %s: %v", srcPath, err)
+                       continue
+               }
+
+               if info.IsDir() {
+                       // For directories, copy recursively
+                       if err := CopyDir(srcPath, destPath, logger); err != nil {
+                               logger.Printf("Error copying directory %s to %s: %v", srcPath, destPath, err)
+                       } else {
+                               // Remove the original directory after successful copy
+                               if err := os.RemoveAll(srcPath); err != nil {
+                                       logger.Printf("Error removing original directory %s: %v", srcPath, err)
+                               }
+                       }
+               } else {
+                       // For files, copy directly
+                       if err := CopyFile(srcPath, destPath, logger); err != nil {
+                               logger.Printf("Error copying file %s to %s: %v", srcPath, destPath, err)
+                       } else {
+                               // Remove the original file after successful copy
+                               if err := os.Remove(srcPath); err != nil {
+                                       logger.Printf("Error removing original file %s: %v", srcPath, err)
+                               }
+                       }
+               }
+       }
+
+       logger.Println("All old data has been moved to " + oldDir + ". When logging in to an old account, the migration will be started.\n")
+}
+
+// CopyFile copies a file from src to dst
+func CopyFile(src, dst string, logger *log.Logger) error {
+       // Open source file
+       srcFile, err := os.Open(src)
+       if err != nil {
+               return fmt.Errorf("failed to open source file: %w", err)
+       }
+       defer srcFile.Close()
+
+       // Create destination file
+       dstFile, err := os.Create(dst)
+       if err != nil {
+               return fmt.Errorf("failed to create destination file: %w", err)
+       }
+       defer dstFile.Close()
+
+       // Copy the content
+       _, err = io.Copy(dstFile, srcFile)
+       if err != nil {
+               return fmt.Errorf("failed to copy file content: %w", err)
+       }
+
+       // Sync the file to ensure it's written to disk
+       if err := dstFile.Sync(); err != nil {
+               return fmt.Errorf("failed to sync destination file: %w", err)
+       }
+
+       // Get the source file permissions
+       srcInfo, err := os.Stat(src)
+       if err != nil {
+               return fmt.Errorf("failed to get source file info: %w", err)
+       }
+
+       // Set the same permissions for the destination file
+       if err := os.Chmod(dst, srcInfo.Mode()); err != nil {
+               return fmt.Errorf("failed to set destination file permissions: %w", err)
+       }
+
+       logger.Printf("Copied file from %s to %s", src, dst)
+       return nil
+}
+
+// CopyDir copies a directory recursively from src to dst
+func CopyDir(src, dst string, logger *log.Logger) error {
+       // Get source directory info
+       srcInfo, err := os.Stat(src)
+       if err != nil {
+               return fmt.Errorf("failed to get source directory info: %w", err)
+       }
+
+       // Create destination directory with the same permissions
+       if err := os.MkdirAll(dst, srcInfo.Mode()); err != nil {
+               return fmt.Errorf("failed to create destination directory: %w", err)
+       }
+
+       // Read source directory entries
+       entries, err := os.ReadDir(src)
+       if err != nil {
+               return fmt.Errorf("failed to read source directory: %w", err)
+       }
+
+       // Copy each entry
+       for _, entry := range entries {
+               srcPath := src + "/" + entry.Name()
+               dstPath := dst + "/" + entry.Name()
+
+               // Get entry info
+               entryInfo, err := os.Stat(srcPath)
+               if err != nil {
+                       logger.Printf("Error getting info for %s: %v", srcPath, err)
+                       continue
+               }
+
+               // Copy directory or file
+               if entryInfo.IsDir() {
+                       if err := CopyDir(srcPath, dstPath, logger); err != nil {
+                               logger.Printf("Error copying directory %s to %s: %v", srcPath, dstPath, err)
+                       }
+               } else {
+                       if err := CopyFile(srcPath, dstPath, logger); err != nil {
+                               logger.Printf("Error copying file %s to %s: %v", srcPath, dstPath, err)
+                       }
+               }
+       }
+
+       logger.Printf("Copied directory from %s to %s", src, dst)
+       return nil
+}
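
InitSettings above parses numeric environment variables with fmt.Sscanf and booleans by comparing against "true". A possible alternative using strconv, which also accepts 1/0 and t/f and rejects trailing garbage; this is only a sketch of an option, not the committed behaviour:

    package main

    import (
        "fmt"
        "os"
        "strconv"
    )

    // envInt returns the integer value of an environment variable, or a default.
    func envInt(name string, def int) int {
        if v := os.Getenv(name); v != "" {
            if n, err := strconv.Atoi(v); err == nil {
                return n
            }
        }
        return def
    }

    // envBool returns the boolean value of an environment variable, or a default.
    func envBool(name string, def bool) bool {
        if v := os.Getenv(name); v != "" {
            if b, err := strconv.ParseBool(v); err == nil { // accepts 1/0, t/f, true/false
                return b
            }
        }
        return def
    }

    func main() {
        fmt.Println(envInt("LOGOUT_AFTER_DAYS", 30), envBool("ALLOW_REGISTRATION", false))
    }
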
diff --git a/backend/utils/migration.go b/backend/utils/migration.go
new file mode 100644 (file)
index 0000000..a7258b1
--- /dev/null
@@ -0,0 +1,1005 @@
+package utils
+
+import (
+       "crypto/aes"
+       "crypto/cipher"
+       "crypto/pbkdf2"
+       "crypto/sha256"
+       "encoding/base64"
+       "encoding/json"
+       "fmt"
+       "os"
+       "path/filepath"
+       "strconv"
+       "strings"
+       "sync"
+       "time"
+
+       "crypto/hmac"
+       "crypto/subtle"
+)
+
+// Mutexes for file access
+var (
+       activeMigrationsMutex sync.RWMutex // For the map of active migrations
+       oldUsersFileMutex     sync.RWMutex // For old/users.json
+       templatesMutex        sync.RWMutex // For templates.json
+       tagsMutex             sync.RWMutex // For tags.json
+       logsMutex             sync.RWMutex // For logs
+       filesMutex            sync.RWMutex // For files in the files directory
+)
+
+// Map tracking active migrations (username -> bool)
+var activeMigrations = make(map[string]bool)
+
+// IsUserMigrating reports whether a migration is already running for a user
+func IsUserMigrating(username string) bool {
+       activeMigrationsMutex.RLock()
+       defer activeMigrationsMutex.RUnlock()
+       return activeMigrations[username]
+}
+
+// SetUserMigrating marks a user as migrating or not migrating
+func SetUserMigrating(username string, migrating bool) {
+       activeMigrationsMutex.Lock()
+       defer activeMigrationsMutex.Unlock()
+       if migrating {
+               activeMigrations[username] = true
+       } else {
+               delete(activeMigrations, username)
+       }
+}
+
+// Fernet implementation based on Python's cryptography.fernet
+// Reference: https://github.com/fernet/spec/blob/master/Spec.md
+
+const (
+       fernetVersion byte  = 0x80
+       maxClockSkew  int64 = 60 // seconds
+)
+
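+// Per the Fernet spec, the 32-byte key splits into a signing key (key[:16],
+// used for HMAC-SHA256) and an encryption key (key[16:32]); a token is laid
+// out as: version (1 byte) | timestamp (8 bytes, big-endian) | IV (16 bytes) |
+// ciphertext | HMAC (32 bytes over all preceding bytes).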
+// FernetDecrypt decrypts a Fernet token using the given key
+func FernetDecrypt(token string, key []byte) ([]byte, error) {
+       // Decode token
+       tokenBytes, err := base64.URLEncoding.DecodeString(token)
+       if err != nil {
+               return nil, fmt.Errorf("invalid token encoding: %v", err)
+       }
+
+       // Check token length
+       if len(tokenBytes) < 1+8+16+1+32 {
+               return nil, fmt.Errorf("token too short")
+       }
+
+       // Check version
+       if tokenBytes[0] != fernetVersion {
+               return nil, fmt.Errorf("invalid token version")
+       }
+
+       // Extract parts
+       timestamp := tokenBytes[1:9]
+       iv := tokenBytes[9:25]
+       ciphertext := tokenBytes[25 : len(tokenBytes)-32]
+       hmacValue := tokenBytes[len(tokenBytes)-32:]
+
+       // Verify HMAC
+       if !verifyFernetHMAC(key, tokenBytes[:len(tokenBytes)-32], hmacValue) {
+               return nil, fmt.Errorf("invalid token signature")
+       }
+
+       // Verify timestamp
+       if !verifyFernetTimestamp(timestamp) {
+               return nil, fmt.Errorf("token expired")
+       }
+
+       // Create cipher
+       block, err := aes.NewCipher(key[16:32])
+       if err != nil {
+               return nil, fmt.Errorf("error creating cipher: %v", err)
+       }
+
+       // Decrypt
+       plaintext := make([]byte, len(ciphertext))
+       stream := cipher.NewCTR(block, iv)
+       stream.XORKeyStream(plaintext, ciphertext)
+
+       return plaintext, nil
+}
+
+// verifyFernetHMAC verifies the HMAC signature of a Fernet token
+func verifyFernetHMAC(key, data, signature []byte) bool {
+       // Create HMAC
+       h := hmac.New(sha256.New, key[:16])
+       h.Write(data)
+       expectedSignature := h.Sum(nil)
+
+       // Compare signatures
+       return subtle.ConstantTimeCompare(signature, expectedSignature) == 1
+}
+
+// verifyFernetTimestamp verifies that a Fernet token's timestamp is valid
+func verifyFernetTimestamp(timestamp []byte) bool {
+       // Parse timestamp
+       ts := int64(0)
+       for _, b := range timestamp {
+               ts = (ts << 8) | int64(b)
+       }
+
+       // Check if token is expired
+       now := time.Now().Unix()
+       return now-maxClockSkew <= ts && ts <= now+maxClockSkew
+}
+
+// GetOldUsers retrieves the users from the old users.json file
+func GetOldUsers() (map[string]any, error) {
+       oldUsersFileMutex.RLock()
+       defer oldUsersFileMutex.RUnlock()
+
+       // Try to open the old users.json file
+       filePath := filepath.Join(Settings.DataPath, "old", "users.json")
+       file, err := os.Open(filePath)
+       if err != nil {
+               if os.IsNotExist(err) {
+                       Logger.Printf("old/users.json - File not found")
+                       return map[string]any{}, nil
+               }
+               Logger.Printf("Error opening old/users.json: %v", err)
+               return nil, fmt.Errorf("internal server error when trying to open old/users.json")
+       }
+       defer file.Close()
+
+       // Read the file content
+       var content map[string]any
+       decoder := json.NewDecoder(file)
+       if err := decoder.Decode(&content); err != nil {
+               Logger.Printf("Error decoding old/users.json: %v", err)
+               return nil, fmt.Errorf("internal server error when trying to decode old/users.json")
+       }
+
+       return content, nil
+}
+
+// VerifyOldPassword verifies if a password matches a hash from the old version
+// Uses HMAC-SHA256 for verification
+func VerifyOldPassword(password, hash string) bool {
+       // Parse the hash format: sha256$salt$hash
+       parts := strings.Split(hash, "$")
+       if len(parts) != 3 || parts[0] != "sha256" {
+               return false
+       }
+
+       salt, storedHash := parts[1], parts[2]
+
+       // Create HMAC with SHA256
+       h := hmac.New(sha256.New, []byte(salt))
+       h.Write([]byte(password))
+       calculatedHash := fmt.Sprintf("%x", h.Sum(nil))
+
+       // Compare hashes using constant-time comparison to prevent timing attacks
+       return subtle.ConstantTimeCompare([]byte(calculatedHash), []byte(storedHash)) == 1
+}
+
+// MigrateUserData migrates a user's data from the old format to the new format
+func MigrateUserData(username, password string, progressChan chan<- MigrationProgress) error {
+       // Check whether a migration is already running for this user
+       if IsUserMigrating(username) {
+               Logger.Printf("Migration for user %s is already in progress", username)
+               return fmt.Errorf("migration already in progress for user %s", username)
+       }
+
+       // Mark the user as migrating
+       SetUserMigrating(username, true)
+       // Make sure the user is no longer marked as migrating when this function returns
+       defer SetUserMigrating(username, false)
+
+       Logger.Printf("Starting migration for user %s", username)
+
+       // Get old users
+       oldUsersFileMutex.RLock()
+       oldUsersPath := filepath.Join(Settings.DataPath, "old", "users.json")
+       oldUsersBytes, err := os.ReadFile(oldUsersPath)
+       oldUsersFileMutex.RUnlock()
+
+       if err != nil {
+               return fmt.Errorf("error reading old users: %v", err)
+       }
+
+       // Parse old users
+       var oldUsers map[string]any
+       if err := json.Unmarshal(oldUsersBytes, &oldUsers); err != nil {
+               return fmt.Errorf("error parsing old users: %v", err)
+       }
+
+       // Find the old user by username
+       oldUserID := 0
+       var oldUser map[string]any
+       for _, user := range oldUsers["users"].([]any) {
+               u := user.(map[string]any)
+               if u["username"] == username {
+                       oldUser = u
+                       break
+               }
+       }
+
+       if oldUser == nil {
+               return fmt.Errorf("user %s not found in old data", username)
+       }
+
+       oldUserID = int(oldUser["user_id"].(float64))
+
+       Logger.Printf("Found old user ID: %d", oldUserID)
+
+       // Set initial progress
+       if progressChan != nil {
+               progressChan <- MigrationProgress{
+                       Phase:          "initializing",
+                       CurrentItem:    "Checking user data",
+                       ProcessedItems: 0,
+                       TotalItems:     1,
+               }
+       }
+
+       // Verify username matches
+       oldUsername, ok := oldUser["username"].(string)
+       if !ok || oldUsername != username {
+               return fmt.Errorf("username mismatch: expected %s, got %s", username, oldUsername)
+       }
+
+       // Get encryption related data from old user
+       oldSalt, ok := oldUser["salt"].(string)
+       if !ok {
+               return fmt.Errorf("old user data is missing salt")
+       }
+
+       oldEncEncKey, ok := oldUser["enc_enc_key"].(string)
+       if !ok {
+               return fmt.Errorf("old user data is missing encrypted key")
+       }
+
+       // Update progress
+       if progressChan != nil {
+               progressChan <- MigrationProgress{
+                       Phase:          "decrypting_keys",
+                       CurrentItem:    "Decrypting old encryption keys",
+                       ProcessedItems: 1,
+                       TotalItems:     5,
+               }
+       }
+
+       // Derive key from password and salt
+       oldDerivedKey := DeriveKeyFromOldPassword(password, oldSalt)
+       derKey, err := base64.StdEncoding.DecodeString(base64.URLEncoding.EncodeToString(oldDerivedKey))
+       if err != nil {
+               return fmt.Errorf("error decoding old derived key: %v", err)
+       }
+       fmt.Printf("Old derived key: %x\n", derKey)
+       fmt.Printf("Old encrypted key: %s\n", oldEncEncKey)
+
+       // Decode the old encrypted key (just for validation)
+       _, err = base64.URLEncoding.DecodeString(oldEncEncKey)
+       if err != nil {
+               return fmt.Errorf("error decoding old encrypted key: %v", err)
+       }
+
+       // Decrypt the old encryption key
+       oldEncKey, err := FernetDecrypt(oldEncEncKey, oldDerivedKey)
+       if err != nil {
+               return fmt.Errorf("error decrypting old encryption key: %v", err)
+       }
+
+       // Update progress
+       if progressChan != nil {
+               progressChan <- MigrationProgress{
+                       Phase:          "creating_new_user",
+                       CurrentItem:    "Creating new user",
+                       ProcessedItems: 1,
+                       TotalItems:     5,
+               }
+       }
+
+       // Create new encryption key and user data
+       _, newSalt, err := HashPassword(password)
+       if err != nil {
+               return fmt.Errorf("error hashing password: %v", err)
+       }
+
+       newDerivedKey, err := DeriveKeyFromPassword(password, newSalt)
+       if err != nil {
+               return fmt.Errorf("error deriving key: %v", err)
+       }
+
+       // Create a new random encryption key
+       newEncKey := make([]byte, 32)
+       if _, err := RandRead(newEncKey); err != nil {
+               return fmt.Errorf("error generating new encryption key: %v", err)
+       }
+
+       // Encrypt the new encryption key
+       aead, err := CreateAEAD(newDerivedKey)
+       if err != nil {
+               return fmt.Errorf("error creating cipher: %v", err)
+       }
+
+       nonce := make([]byte, aead.NonceSize())
+       if _, err := RandRead(nonce); err != nil {
+               return fmt.Errorf("error generating nonce: %v", err)
+       }
+
+       encryptedNewKey := aead.Seal(nonce, nonce, newEncKey, nil)
+       _ = base64.StdEncoding.EncodeToString(encryptedNewKey)
+
+       // Update progress
+       if progressChan != nil {
+               progressChan <- MigrationProgress{
+                       Phase:          "creating_new_user",
+                       CurrentItem:    "Adding user to database",
+                       ProcessedItems: 2,
+                       TotalItems:     5,
+               }
+       }
+
+       // Get existing users or create new users object
+       newUsers, err := GetUsers()
+       if err != nil {
+               return fmt.Errorf("error getting users: %v", err)
+       }
+
+       // Determine new user ID (must be different from any existing user ID)
+       newUserID := oldUserID
+       var existingUserIDs = make(map[int]bool)
+
+       if len(newUsers) > 0 {
+               // Get existing user IDs
+               if usersList, ok := newUsers["users"].([]any); ok {
+                       for _, u := range usersList {
+                               user, ok := u.(map[string]any)
+                               if !ok {
+                                       continue
+                               }
+
+                               if id, ok := user["user_id"].(float64); ok {
+                                       existingUserIDs[int(id)] = true
+                               }
+                       }
+               }
+
+               // Find a free user ID if the old ID is already taken
+               for existingUserIDs[newUserID] {
+                       newUserID++
+               }
+       }
+
+       // Write new users
+       if err := WriteUsers(newUsers); err != nil {
+               return fmt.Errorf("error writing users: %v", err)
+       }
+
+       // Update progress
+       if progressChan != nil {
+               progressChan <- MigrationProgress{
+                       Phase:          "writing_user_data",
+                       CurrentItem:    "User data saved",
+                       ProcessedItems: 3,
+                       TotalItems:     5,
+               }
+       }
+
+       // Now migrate all the data
+       oldDataDir := filepath.Join(Settings.DataPath, "old", strconv.Itoa(oldUserID))
+       newDataDir := filepath.Join(Settings.DataPath, strconv.Itoa(newUserID))
+
+       // Create new data directory
+       if err := os.MkdirAll(newDataDir, 0755); err != nil {
+               return fmt.Errorf("error creating new data directory: %v", err)
+       }
+
+       // Update progress
+       if progressChan != nil {
+               progressChan <- MigrationProgress{
+                       Phase:          "migrating_settings",
+                       CurrentItem:    "Migrating user settings",
+                       ProcessedItems: 0,
+                       TotalItems:     5,
+               }
+       }
+
+       // Migrate user settings
+       if err := migrateUserSettings(oldDataDir, newDataDir, oldEncKey, base64.StdEncoding.EncodeToString(newEncKey), progressChan); err != nil {
+               return fmt.Errorf("error migrating settings: %v", err)
+       }
+
+       // Migrate templates
+       if err := migrateTemplates(oldDataDir, newDataDir, oldEncKey, base64.StdEncoding.EncodeToString(newEncKey), progressChan); err != nil {
+               return fmt.Errorf("error migrating templates: %v", err)
+       }
+
+       // Migrate tags
+       if err := migrateTags(oldDataDir, newDataDir, oldEncKey, base64.StdEncoding.EncodeToString(newEncKey), progressChan); err != nil {
+               return fmt.Errorf("error migrating tags: %v", err)
+       }
+
+       // Migrate logs (years/months)
+       if err := migrateLogs(oldDataDir, newDataDir, oldEncKey, base64.StdEncoding.EncodeToString(newEncKey), progressChan); err != nil {
+               return fmt.Errorf("error migrating logs: %v", err)
+       }
+
+       // Migrate files
+       if err := migrateFiles(oldDataDir, newDataDir, oldEncKey, base64.StdEncoding.EncodeToString(newEncKey), progressChan); err != nil {
+               return fmt.Errorf("error migrating files: %v", err)
+       }
+
+       // Set final progress
+       if progressChan != nil {
+               progressChan <- MigrationProgress{
+                       Phase:          "completed",
+                       CurrentItem:    "Migration completed",
+                       ProcessedItems: 5,
+                       TotalItems:     5,
+               }
+       }
+
+       Logger.Printf("Migration completed for user %s (Old ID: %d, New ID: %d)", username, oldUserID, newUserID)
+       return nil
+}
+
+/* // DeriveKeyFromOldPassword derives a key from a password using the old method (Python version)
+func DeriveKeyFromOldPassword(password, salt string) []byte {
+       // In Python version: hash_secret_raw with time_cost=2, memory_cost=2**15, parallelism=1, hash_len=32, type=Type.ID
+       // For simplicity, we're using SHA-256 here
+       h := sha256.New()
+       h.Write([]byte(salt + password))
+       derivedKey := h.Sum(nil)
+       return derivedKey
+} */
+
+// DeriveKeyFromOldPassword derives a key from a password using the old Python method
+// with PBKDF2-HMAC-SHA256 with 100,000 iterations and 32 bytes output
+func DeriveKeyFromOldPassword(password, salt string) []byte {
+       // Use PBKDF2 with HMAC-SHA256, 100,000 iterations, and 32 byte output
+       // This matches the Python werkzeug implementation for password hashing
+       derivedKey, _ := pbkdf2.Key(sha256.New, password, []byte(salt), 100000, 32)
+       return derivedKey
+}
+
+// MigrationProgress holds information about the progress of a migration
+type MigrationProgress struct {
+       Phase          string `json:"phase"`           // Current migration phase
+       CurrentItem    string `json:"current_item"`    // Current item being migrated
+       ProcessedItems int    `json:"processed_items"` // Number of items already processed
+       TotalItems     int    `json:"total_items"`     // Total number of items to migrate
+}
+
+// Helper functions for migration
+
+func migrateUserSettings(oldDir, newDir string, oldKey []byte, newKey string, progressChan chan<- MigrationProgress) error {
+       // Lock for the old settings file
+       userSettingsMutex.RLock()
+       // Check if old settings exist
+       oldSettingsPath := filepath.Join(oldDir, "settings.encrypted")
+       _, err := os.Stat(oldSettingsPath)
+       exists := !os.IsNotExist(err)
+       userSettingsMutex.RUnlock()
+
+       if err != nil && !os.IsNotExist(err) {
+               return fmt.Errorf("error checking if old settings exist: %v", err)
+       }
+
+       if !exists {
+               return nil // No settings to migrate
+       }
+
+       // Update progress
+       if progressChan != nil {
+               progressChan <- MigrationProgress{
+                       Phase:          "migrating_settings",
+                       CurrentItem:    "Reading old settings",
+                       ProcessedItems: 1,
+                       TotalItems:     4,
+               }
+       }
+
+       // Read old settings while holding the lock
+       userSettingsMutex.RLock()
+       oldSettingsBytes, err := os.ReadFile(oldSettingsPath)
+       userSettingsMutex.RUnlock()
+
+       if err != nil {
+               return fmt.Errorf("error reading old settings: %v", err)
+       }
+
+       // Update progress
+       if progressChan != nil {
+               progressChan <- MigrationProgress{
+                       Phase:          "migrating_settings",
+                       CurrentItem:    "Decrypting old settings",
+                       ProcessedItems: 2,
+                       TotalItems:     4,
+               }
+       }
+
+       // Decrypt old settings
+       oldSettingsPlain, err := FernetDecrypt(string(oldSettingsBytes), oldKey)
+       if err != nil {
+               return fmt.Errorf("error decrypting old settings: %v", err)
+       }
+
+       // Update progress
+       if progressChan != nil {
+               progressChan <- MigrationProgress{
+                       Phase:          "migrating_settings",
+                       CurrentItem:    "Encrypting settings with new key",
+                       ProcessedItems: 3,
+                       TotalItems:     4,
+               }
+       }
+
+       // Encrypt with new key
+       newSettingsEnc, err := EncryptText(string(oldSettingsPlain), newKey)
+       if err != nil {
+               return fmt.Errorf("error encrypting new settings: %v", err)
+       }
+
+       // Update progress
+       if progressChan != nil {
+               progressChan <- MigrationProgress{
+                       Phase:          "migrating_settings",
+                       CurrentItem:    "Writing new settings",
+                       ProcessedItems: 3,
+                       TotalItems:     4,
+               }
+       }
+
+       // Write new settings while holding the lock
+       newSettingsPath := filepath.Join(newDir, "settings.encrypted")
+       userSettingsMutex.Lock()
+       err = os.WriteFile(newSettingsPath, []byte(newSettingsEnc), 0644)
+       userSettingsMutex.Unlock()
+
+       if err != nil {
+               return fmt.Errorf("error writing new settings: %v", err)
+       }
+
+       // Update progress
+       if progressChan != nil {
+               progressChan <- MigrationProgress{
+                       Phase:          "migrating_settings",
+                       CurrentItem:    "Settings migration completed",
+                       ProcessedItems: 4,
+                       TotalItems:     4,
+               }
+       }
+
+       return nil
+}
+
+func migrateTemplates(oldDir, newDir string, oldKey []byte, newKey string, progressChan chan<- MigrationProgress) error {
+       // Check if old templates exist
+       templatesMutex.RLock()
+       oldTemplatesPath := filepath.Join(oldDir, "templates.json")
+       _, err := os.Stat(oldTemplatesPath)
+       exists := !os.IsNotExist(err)
+       templatesMutex.RUnlock()
+
+       if err != nil && !os.IsNotExist(err) {
+               return fmt.Errorf("error checking if old templates exist: %v", err)
+       }
+
+       if !exists {
+               return nil // No templates to migrate
+       }
+
+       // Update progress
+       if progressChan != nil {
+               progressChan <- MigrationProgress{
+                       Phase:          "migrating_templates",
+                       CurrentItem:    "Reading templates",
+                       ProcessedItems: 1,
+                       TotalItems:     2,
+               }
+       }
+
+       // Read old templates
+       templatesMutex.RLock()
+       oldTemplatesBytes, err := os.ReadFile(oldTemplatesPath)
+       templatesMutex.RUnlock()
+
+       if err != nil {
+               return fmt.Errorf("error reading old templates: %v", err)
+       }
+
+       // Update progress
+       if progressChan != nil {
+               progressChan <- MigrationProgress{
+                       Phase:          "migrating_templates",
+                       CurrentItem:    "Writing templates",
+                       ProcessedItems: 1,
+                       TotalItems:     2,
+               }
+       }
+
+       // Templates are not encrypted, just copy
+       newTemplatesPath := filepath.Join(newDir, "templates.json")
+       templatesMutex.Lock()
+       err = os.WriteFile(newTemplatesPath, oldTemplatesBytes, 0644)
+       templatesMutex.Unlock()
+
+       if err != nil {
+               return fmt.Errorf("error writing new templates: %v", err)
+       }
+
+       // Update progress
+       if progressChan != nil {
+               progressChan <- MigrationProgress{
+                       Phase:          "migrating_templates",
+                       CurrentItem:    "Templates migration completed",
+                       ProcessedItems: 2,
+                       TotalItems:     2,
+               }
+       }
+
+       return nil
+}
+
+func migrateTags(oldDir, newDir string, oldKey []byte, newKey string, progressChan chan<- MigrationProgress) error {
+       // Check if old tags exist
+       tagsMutex.RLock()
+       oldTagsPath := filepath.Join(oldDir, "tags.json")
+       _, err := os.Stat(oldTagsPath)
+       exists := !os.IsNotExist(err)
+       tagsMutex.RUnlock()
+
+       if err != nil && !os.IsNotExist(err) {
+               return fmt.Errorf("error checking if old tags exist: %v", err)
+       }
+
+       if !exists {
+               return nil // No tags to migrate
+       }
+
+       // Update progress
+       if progressChan != nil {
+               progressChan <- MigrationProgress{
+                       Phase:          "migrating_tags",
+                       CurrentItem:    "Reading tags",
+                       ProcessedItems: 1,
+                       TotalItems:     2,
+               }
+       }
+
+       // Read old tags
+       tagsMutex.RLock()
+       oldTagsBytes, err := os.ReadFile(oldTagsPath)
+       tagsMutex.RUnlock()
+
+       if err != nil {
+               return fmt.Errorf("error reading old tags: %v", err)
+       }
+
+       // Update progress
+       if progressChan != nil {
+               progressChan <- MigrationProgress{
+                       Phase:          "migrating_tags",
+                       CurrentItem:    "Writing tags",
+                       ProcessedItems: 1,
+                       TotalItems:     2,
+               }
+       }
+
+       // Tags are not encrypted, just copy
+       newTagsPath := filepath.Join(newDir, "tags.json")
+       tagsMutex.Lock()
+       err = os.WriteFile(newTagsPath, oldTagsBytes, 0644)
+       tagsMutex.Unlock()
+
+       if err != nil {
+               return fmt.Errorf("error writing new tags: %v", err)
+       }
+
+       // Update progress
+       if progressChan != nil {
+               progressChan <- MigrationProgress{
+                       Phase:          "migrating_tags",
+                       CurrentItem:    "Tags migration completed",
+                       ProcessedItems: 2,
+                       TotalItems:     2,
+               }
+       }
+
+       return nil
+}
+
+func migrateLogs(oldDir, newDir string, oldKey []byte, newKey string, progressChan chan<- MigrationProgress) error {
+       // Get all year directories
+       logsMutex.RLock()
+       entries, err := os.ReadDir(oldDir)
+       logsMutex.RUnlock()
+
+       if err != nil {
+               return fmt.Errorf("error reading old directory: %v", err)
+       }
+
+       // Count total years and collect year names
+       totalYears := 0
+       var yearDirs []string
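+       // Only directories with purely numeric names (e.g. "2024") are treated as
+       // year folders; other entries in the data directory (files/, templates.json,
+       // tags.json, ...) are ignored here.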
+       for _, entry := range entries {
+               if entry.IsDir() && isNumeric(entry.Name()) {
+                       totalYears++
+                       yearDirs = append(yearDirs, entry.Name())
+               }
+       }
+
+       // Update progress
+       if progressChan != nil {
+               progressChan <- MigrationProgress{
+                       Phase:          "migrating_logs",
+                       CurrentItem:    fmt.Sprintf("Found %d years to migrate", totalYears),
+                       ProcessedItems: 0,
+                       TotalItems:     totalYears,
+               }
+       }
+
+       yearCount := 0
+       for _, yearDir := range yearDirs {
+               oldYearPath := filepath.Join(oldDir, yearDir)
+               newYearPath := filepath.Join(newDir, yearDir)
+
+               // Create new year directory
+               logsMutex.Lock()
+               if err := os.MkdirAll(newYearPath, 0755); err != nil {
+                       logsMutex.Unlock()
+                       return fmt.Errorf("error creating new year directory: %v", err)
+               }
+               logsMutex.Unlock()
+
+               // Get all month files
+               logsMutex.RLock()
+               monthEntries, err := os.ReadDir(oldYearPath)
+               logsMutex.RUnlock()
+
+               if err != nil {
+                       return fmt.Errorf("error reading old year directory: %v", err)
+               }
+
+               // Count total months
+               var monthFiles []string
+               for _, monthEntry := range monthEntries {
+                       if !monthEntry.IsDir() && strings.HasSuffix(monthEntry.Name(), ".json") {
+                               monthFiles = append(monthFiles, monthEntry.Name())
+                       }
+               }
+
+               // Update progress for this year
+               if progressChan != nil {
+                       progressChan <- MigrationProgress{
+                               Phase:          "migrating_logs",
+                               CurrentItem:    fmt.Sprintf("Migrating year %s (%d/%d) - %d months", yearDir, yearCount+1, totalYears, len(monthFiles)),
+                               ProcessedItems: yearCount,
+                               TotalItems:     totalYears,
+                       }
+               }
+
+               monthCount := 0
+               for _, monthFile := range monthFiles {
+                       oldMonthPath := filepath.Join(oldYearPath, monthFile)
+                       newMonthPath := filepath.Join(newYearPath, monthFile)
+
+                       // Read old month
+                       logsMutex.RLock()
+                       oldMonthBytes, err := os.ReadFile(oldMonthPath)
+                       logsMutex.RUnlock()
+
+                       if err != nil {
+                               Logger.Printf("Error reading old month %s: %v", oldMonthPath, err)
+                               continue
+                       }
+
+                       // Parse old month
+                       var oldMonth map[string]any
+                       if err := json.Unmarshal(oldMonthBytes, &oldMonth); err != nil {
+                               Logger.Printf("Error parsing old month %s: %v", oldMonthPath, err)
+                               continue
+                       }
+
+                       // Re-encrypt each day's "content": decrypt it with the old Fernet
+                       // key, then encrypt it with the new key. Other fields are copied
+                       // through unchanged.
+                       for day, dayData := range oldMonth {
+                               dayMap, ok := dayData.(map[string]any)
+                               if !ok {
+                                       continue
+                               }
+
+                               if encrypted, ok := dayMap["content"].(string); ok {
+                                       // Decrypt content
+                                       plaintext, err := FernetDecrypt(encrypted, oldKey)
+                                       if err != nil {
+                                               Logger.Printf("Error decrypting content for day %s: %v", day, err)
+                                               continue
+                                       }
+
+                                       // Encrypt with new key
+                                       newEncrypted, err := EncryptText(string(plaintext), newKey)
+                                       if err != nil {
+                                               Logger.Printf("Error encrypting content for day %s: %v", day, err)
+                                               continue
+                                       }
+
+                                       dayMap["content"] = newEncrypted
+                               }
+                       }
+
+                       // Write new month
+                       newMonthBytes, err := json.Marshal(oldMonth)
+                       if err != nil {
+                               Logger.Printf("Error marshaling new month %s: %v", newMonthPath, err)
+                               continue
+                       }
+
+                       logsMutex.Lock()
+                       err = os.WriteFile(newMonthPath, newMonthBytes, 0644)
+                       logsMutex.Unlock()
+
+                       if err != nil {
+                               Logger.Printf("Error writing new month %s: %v", newMonthPath, err)
+                               continue
+                       }
+
+                       monthCount++
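+                       // The counts below use a rough "100 months per year" scale so the
+                       // progress bar still advances while a single year is being migrated.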
+                       if progressChan != nil && monthCount%5 == 0 {
+                               // Update progress occasionally
+                               progressChan <- MigrationProgress{
+                                       Phase:          "migrating_logs",
+                                       CurrentItem:    fmt.Sprintf("Migrating year %s - month %s (%d/%d)", yearDir, monthFile, monthCount, len(monthFiles)),
+                                       ProcessedItems: yearCount*100 + monthCount,
+                                       TotalItems:     totalYears * 100, // Approximation
+                               }
+                       }
+               }
+
+               yearCount++
+               if progressChan != nil {
+                       progressChan <- MigrationProgress{
+                               Phase:          "migrating_logs",
+                               CurrentItem:    fmt.Sprintf("Completed year %s (%d/%d)", yearDir, yearCount, totalYears),
+                               ProcessedItems: yearCount,
+                               TotalItems:     totalYears,
+                       }
+               }
+       }
+
+       // Update progress
+       if progressChan != nil {
+               progressChan <- MigrationProgress{
+                       Phase:          "migrating_logs",
+                       CurrentItem:    "Logs migration completed",
+                       ProcessedItems: totalYears,
+                       TotalItems:     totalYears,
+               }
+       }
+
+       return nil
+}
+
+func migrateFiles(oldDir, newDir string, oldKey []byte, newKey string, progressChan chan<- MigrationProgress) error {
+       // Check if old files directory exists
+       filesMutex.RLock()
+       oldFilesDir := filepath.Join(oldDir, "files")
+       _, err := os.Stat(oldFilesDir)
+       exists := !os.IsNotExist(err)
+       filesMutex.RUnlock()
+
+       if err != nil && !os.IsNotExist(err) {
+               return fmt.Errorf("error checking if old files directory exists: %v", err)
+       }
+
+       if !exists {
+               return nil // No files to migrate
+       }
+
+       // Create new files directory
+       newFilesDir := filepath.Join(newDir, "files")
+       filesMutex.Lock()
+       if err := os.MkdirAll(newFilesDir, 0755); err != nil {
+               filesMutex.Unlock()
+               return fmt.Errorf("error creating new files directory: %v", err)
+       }
+       filesMutex.Unlock()
+
+       // Get all files
+       filesMutex.RLock()
+       entries, err := os.ReadDir(oldFilesDir)
+       filesMutex.RUnlock()
+
+       if err != nil {
+               return fmt.Errorf("error reading old files directory: %v", err)
+       }
+
+       totalFiles := len(entries)
+       fileCount := 0
+
+       // Update progress
+       if progressChan != nil {
+               progressChan <- MigrationProgress{
+                       Phase:          "migrating_files",
+                       CurrentItem:    fmt.Sprintf("Found %d files to migrate", totalFiles),
+                       ProcessedItems: 0,
+                       TotalItems:     totalFiles,
+               }
+       }
+
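+       // Each file is decrypted with the old Fernet key and re-encrypted with the
+       // new key; failures are logged and skipped so a single unreadable file does
+       // not abort the whole migration.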
+       for idx, entry := range entries {
+               if entry.IsDir() {
+                       continue
+               }
+
+               fileName := entry.Name()
+               oldFilePath := filepath.Join(oldFilesDir, fileName)
+               newFilePath := filepath.Join(newFilesDir, fileName)
+
+               // Update progress occasionally
+               if progressChan != nil && idx%5 == 0 {
+                       progressChan <- MigrationProgress{
+                               Phase:          "migrating_files",
+                               CurrentItem:    fmt.Sprintf("Migrating file %s (%d/%d)", fileName, idx+1, totalFiles),
+                               ProcessedItems: idx,
+                               TotalItems:     totalFiles,
+                       }
+               }
+
+               // Read old file
+               filesMutex.RLock()
+               oldFileBytes, err := os.ReadFile(oldFilePath)
+               filesMutex.RUnlock()
+
+               if err != nil {
+                       Logger.Printf("Error reading old file %s: %v", oldFilePath, err)
+                       continue
+               }
+
+               // Decrypt file
+               plaintext, err := FernetDecrypt(string(oldFileBytes), oldKey)
+               if err != nil {
+                       Logger.Printf("Error decrypting file %s: %v", fileName, err)
+                       continue
+               }
+
+               // Encrypt with new key
+               newEncrypted, err := EncryptFile(plaintext, newKey)
+               if err != nil {
+                       Logger.Printf("Error encrypting file %s: %v", fileName, err)
+                       continue
+               }
+
+               // Write new file
+               filesMutex.Lock()
+               err = os.WriteFile(newFilePath, newEncrypted, 0644)
+               filesMutex.Unlock()
+
+               if err != nil {
+                       Logger.Printf("Error writing new file %s: %v", newFilePath, err)
+                       continue
+               }
+
+               fileCount++
+       }
+
+       // Update final progress
+       if progressChan != nil {
+               progressChan <- MigrationProgress{
+                       Phase:          "migrating_files",
+                       CurrentItem:    fmt.Sprintf("Files migration completed (%d files)", fileCount),
+                       ProcessedItems: fileCount,
+                       TotalItems:     totalFiles,
+               }
+       }
+
+       return nil
+}
+
+// isNumeric checks if a string contains only numeric characters
+func isNumeric(s string) bool {
+       for _, c := range s {
+               if c < '0' || c > '9' {
+                       return false
+               }
+       }
+       return len(s) > 0
+}
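+
+// runFullMigration is an illustrative sketch only (it is not referenced anywhere
+// in this commit): one possible way to chain the per-category helpers above for a
+// single user's data directory. Step order and error handling are assumptions;
+// only the helper signatures are taken from this file.
+func runFullMigration(oldDir, newDir string, oldKey []byte, newKey string, progressChan chan<- MigrationProgress) error {
+       steps := []struct {
+               name string
+               fn   func(string, string, []byte, string, chan<- MigrationProgress) error
+       }{
+               {"logs", migrateLogs},
+               {"files", migrateFiles},
+               {"templates", migrateTemplates},
+               {"tags", migrateTags},
+       }
+       for _, step := range steps {
+               if err := step.fn(oldDir, newDir, oldKey, newKey, progressChan); err != nil {
+                       return fmt.Errorf("migrating %s: %w", step.name, err)
+               }
+       }
+       return nil
+}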
index 07e9e85ca7b0a9092d568c2db3fbe3e3ad8adc9e..b47968971960d40d658903f1195531e2220de72c 100644 (file)
@@ -3,105 +3,14 @@ package utils
 import (
        "crypto/rand"
        "encoding/base64"
-       "encoding/json"
        "fmt"
        "io"
-       "log"
-       "net/http"
-       "os"
        "time"
 
        "github.com/golang-jwt/jwt/v5"
        "golang.org/x/crypto/argon2"
 )
 
-// Global logger
-var Logger *log.Logger
-
-func init() {
-       // Initialize logger
-       Logger = log.New(os.Stdout, "dailytxt: ", log.LstdFlags|log.Lmicroseconds|log.Lshortfile)
-}
-
-// ContextKey is a type for context keys
-type ContextKey string
-
-// Context keys
-const (
-       UserIDKey     ContextKey = "userID"
-       UsernameKey   ContextKey = "username"
-       DerivedKeyKey ContextKey = "derivedKey"
-)
-
-// Settings holds the application settings
-type AppSettings struct {
-       DataPath        string   `json:"data_path"`
-       Development     bool     `json:"development"`
-       SecretToken     string   `json:"secret_token"`
-       LogoutAfterDays int      `json:"logout_after_days"`
-       AllowedHosts    []string `json:"allowed_hosts"`
-       Indent          int      `json:"indent"`
-}
-
-// Global settings
-var Settings AppSettings
-
-// InitSettings loads the application settings
-func InitSettings() error {
-       // Default settings
-       Settings = AppSettings{
-               DataPath:        "/data",
-               Development:     false,
-               SecretToken:     generateSecretToken(),
-               LogoutAfterDays: 30,
-               AllowedHosts:    []string{"http://localhost:5173", "http://127.0.0.1:5173"},
-               Indent:          0,
-       }
-
-       fmt.Print("\nDetected following settings:\n================\n")
-
-       // Override with environment variables if available
-       if dataPath := os.Getenv("DATA_PATH"); dataPath != "" {
-               Settings.DataPath = dataPath
-       }
-       fmt.Printf("Data Path: %s\n", Settings.DataPath)
-
-       if os.Getenv("DEVELOPMENT") == "true" {
-               Settings.Development = true
-       }
-       fmt.Printf("Development Mode: %t\n", Settings.Development)
-
-       if secretToken := os.Getenv("SECRET_TOKEN"); secretToken != "" {
-               Settings.SecretToken = secretToken
-       }
-       fmt.Printf("Secret Token: %s\n", Settings.SecretToken)
-
-       if logoutDays := os.Getenv("LOGOUT_AFTER_DAYS"); logoutDays != "" {
-               // Parse logoutDays to int
-               var days int
-               if _, err := fmt.Sscanf(logoutDays, "%d", &days); err == nil {
-                       Settings.LogoutAfterDays = days
-               }
-       }
-       fmt.Printf("Logout After Days: %d\n", Settings.LogoutAfterDays)
-
-       if indent := os.Getenv("INDENT"); indent != "" {
-               // Parse indent to int
-               var ind int
-               if _, err := fmt.Sscanf(indent, "%d", &ind); err == nil {
-                       Settings.Indent = ind
-               }
-       }
-       fmt.Printf("Indent: %d\n================\n\n", Settings.Indent)
-
-       // Create data directory if it doesn't exist
-       if err := os.MkdirAll(Settings.DataPath, 0755); err != nil {
-               return fmt.Errorf("failed to create data directory: %v", err)
-       }
-
-       return nil
-}
-
 // Claims represents the JWT claims
 type Claims struct {
        UserID     int    `json:"user_id"`
@@ -212,32 +121,11 @@ func DeriveKeyFromPassword(password, saltBase64 string) ([]byte, error) {
        return key, nil
 }
 
-// GenerateSecretToken generates a secure random token
-func generateSecretToken() string {
+// GenerateSecretToken generates a secure random token
+func GenerateSecretToken() string {
        b := make([]byte, 32)
        if _, err := rand.Read(b); err != nil {
-               Logger.Fatalf("Failed to generate secret token: %v", err)
+               panic(fmt.Sprintf("Failed to generate secret token: %v", err))
        }
        return base64.URLEncoding.EncodeToString(b)
 }
-
-// JSONResponse sends a JSON response with the given status code and data
-func JSONResponse(w http.ResponseWriter, statusCode int, data any) {
-       // Set content type
-       w.Header().Set("Content-Type", "application/json")
-       w.WriteHeader(statusCode)
-
-       // Encode data to JSON
-       var encoder *json.Encoder
-       if Settings.Development && Settings.Indent > 0 {
-               encoder = json.NewEncoder(w)
-               encoder.SetIndent("", fmt.Sprintf("%*s", Settings.Indent, ""))
-       } else {
-               encoder = json.NewEncoder(w)
-       }
-
-       if err := encoder.Encode(data); err != nil {
-               Logger.Printf("Error encoding JSON response: %v", err)
-               http.Error(w, "Internal Server Error", http.StatusInternalServerError)
-       }
-}
index 1bf32e7d10f00fc73fc6c76e77f6157cdcdad7fe..267dd21588ae900f60cbfe8c7ab7a63ddb4aee7c 100644 (file)
@@ -18,6 +18,8 @@
        let registration_failed_message = $state('');
        let is_registering = $state(false);
 
+       let registration_allowed = $state(true);
+
        onMount(() => {
                // if params error=440 or error=401, show toast
                if (window.location.search.includes('error=440')) {
                        const toast = new bootstrap.Toast(document.getElementById('toastLoginInvalid'));
                        toast.show();
                }
+
+               // check if registration is allowed
+               checkRegistrationAllowed();
        });
 
+       function checkRegistrationAllowed() {
+               axios
+                       .get(API_URL + '/users/isRegistrationAllowed')
+                       .then((response) => {
+                               registration_allowed = response.data.registration_allowed;
+                       })
+                       .catch((error) => {
+                               console.error('Error checking registration allowed:', error);
+                               registration_allowed = false; // Default to false if there's an error
+                       });
+       }
+
        function handleLogin(event) {
                event.preventDefault();
 
                                }
                        })
                        .catch((error) => {
-                               console.error(error.response.data.detail);
-                               registration_failed_message = error.response.data.detail;
+                               console.error(error.response?.data ?? error);
+                               registration_failed_message = error.response?.data ?? '';
                                show_registration_failed_with_message = true;
                        })
                        .finally(() => {
                                                <form onsubmit={handleRegister}>
                                                        <div class="form-floating mb-3">
                                                                <input
+                                                                       disabled={!registration_allowed}
                                                                        type="text"
                                                                        class="form-control"
                                                                        id="registerUsername"
                                                        </div>
                                                        <div class="form-floating mb-3">
                                                                <input
+                                                                       disabled={!registration_allowed}
                                                                        type="password"
                                                                        class="form-control"
                                                                        id="registerPassword"
                                                        </div>
                                                        <div class="form-floating mb-3">
                                                                <input
+                                                                       disabled={!registration_allowed}
                                                                        type="password"
                                                                        class="form-control"
                                                                        id="registerPassword2"
                                                                />
                                                                <label for="registerPassword2">Password bestätigen</label>
                                                        </div>
+                                                       {#if !registration_allowed}
+                                                               <div class="alert alert-danger" role="alert">
+                                                                       Registrierung ist derzeit nicht erlaubt!
+                                                               </div>
+                                                       {/if}
                                                        {#if show_registration_failed_with_message}
                                                                <div class="alert alert-danger" role="alert">
                                                                        Registrierung fehlgeschlagen!<br />
                                                                <div class="alert alert-danger" role="alert">Passwörter stimmen nicht überein!</div>
                                                        {/if}
                                                        <div class="d-flex justify-content-center">
-                                                               <button type="submit" class="btn btn-primary" disabled={is_registering}>
+                                                               <button
+                                                                       type="submit"
+                                                                       class="btn btn-primary"
+                                                                       disabled={is_registering || !registration_allowed}
+                                                               >
                                                                        {#if is_registering}
                                                                                <div class="spinner-border spinner-border-sm" role="status">
                                                                                        <span class="visually-hidden">Loading...</span>