diff --git a/.DS_Store b/.DS_Store new file mode 100644 index 0000000..7bef7d1 Binary files /dev/null and b/.DS_Store differ diff --git a/backend/.env.example b/backend/.env.example new file mode 100644 index 0000000..6856379 --- /dev/null +++ b/backend/.env.example @@ -0,0 +1,13 @@ +# MongoDB settings +MONGODB_URI=mongodb+srv://:@/?retryWrites=true&w=majority&appName= +DB_NAME=your_db_name + +# GitHub OAuth settings +GITHUB_CLIENT_ID=your_github_client_id +GITHUB_CLIENT_SECRET=your_github_client_secret + +# JWT settings +JWT_SECRET=your_jwt_secret + +# Server settings +PORT=8080 diff --git a/backend/.gitignore b/backend/.gitignore new file mode 100644 index 0000000..c375554 --- /dev/null +++ b/backend/.gitignore @@ -0,0 +1,45 @@ +# Binaries for programs and plugins +*.exe +*.exe~ +*.dll +*.so +*.dylib + +# Test binary, coverage, and results +*.test +*.out +*.coverprofile +*.coverage +*.cov + +# Output of go coverage tool +*.cov.out + +# Output of build tools +bin/ +build/ +dist/ + +# Logs +*.log + +# Dependency directories (go mod tidy will regenerate them) +vendor/ + +# IDE/editor files +.vscode/ +.idea/ +*.swp +*~ + +# OS-specific files +.DS_Store +Thumbs.db + +# Environment files +.env +.env.local + +# Go workspace file +go.work +go.work.sum diff --git a/backend/auth-api b/backend/auth-api new file mode 100755 index 0000000..ac97cd9 Binary files /dev/null and b/backend/auth-api differ diff --git a/backend/config/db.go b/backend/config/db.go new file mode 100644 index 0000000..e197c53 --- /dev/null +++ b/backend/config/db.go @@ -0,0 +1,59 @@ +package config + +import ( + "context" + "log" + "os" + "time" + + "go.mongodb.org/mongo-driver/mongo" + "go.mongodb.org/mongo-driver/mongo/options" +) + +var ( + DB *mongo.Database + client *mongo.Client +) + +func ConnectDB() (*mongo.Database, error) { + // Return the existing DB if already connected + if DB != nil { + return DB, nil + } + + mongoURI := os.Getenv("MONGODB_URI") + if mongoURI == "" { + mongoURI = 
"mongodb://localhost:27017" + log.Println("Using default MongoDB URI:", mongoURI) + } + + clientOptions := options.Client().ApplyURI(mongoURI) + + ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second) + defer cancel() + + var err error + client, err = mongo.Connect(ctx, clientOptions) + if err != nil { + return nil, err + } + + err = client.Ping(ctx, nil) + if err != nil { + return nil, err + } + + log.Println("Connected to MongoDB") + + dbName := os.Getenv("DB_NAME") + if dbName == "" { + dbName = "authDB" + } + DB = client.Database(dbName) + + return DB, nil +} + +func GetCollection(collectionName string) *mongo.Collection { + return DB.Collection(collectionName) +} diff --git a/backend/go.mod b/backend/go.mod new file mode 100644 index 0000000..3e5b44c --- /dev/null +++ b/backend/go.mod @@ -0,0 +1,45 @@ +module github.com/yourusername/backend + +go 1.24.4 + +require ( + github.com/bytedance/sonic v1.13.3 // indirect + github.com/bytedance/sonic/loader v0.2.4 // indirect + github.com/cloudwego/base64x v0.1.5 // indirect + github.com/cloudwego/iasm v0.2.0 // indirect + github.com/gabriel-vasile/mimetype v1.4.9 // indirect + github.com/gin-contrib/sse v1.1.0 // indirect + github.com/gin-gonic/gin v1.10.1 // indirect + github.com/go-playground/locales v0.14.1 // indirect + github.com/go-playground/universal-translator v0.18.1 // indirect + github.com/go-playground/validator/v10 v10.26.0 // indirect + github.com/goccy/go-json v0.10.5 // indirect + github.com/golang-jwt/jwt/v4 v4.5.2 // indirect + github.com/golang/snappy v1.0.0 // indirect + github.com/google/uuid v1.6.0 // indirect + github.com/joho/godotenv v1.5.1 // indirect + github.com/json-iterator/go v1.1.12 // indirect + github.com/klauspost/compress v1.18.0 // indirect + github.com/klauspost/cpuid/v2 v2.2.10 // indirect + github.com/leodido/go-urn v1.4.0 // indirect + github.com/mattn/go-isatty v0.0.20 // indirect + github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // 
indirect + github.com/modern-go/reflect2 v1.0.2 // indirect + github.com/montanaflynn/stats v0.7.1 // indirect + github.com/pelletier/go-toml/v2 v2.2.4 // indirect + github.com/twitchyliquid64/golang-asm v0.15.1 // indirect + github.com/ugorji/go/codec v1.2.14 // indirect + github.com/xdg-go/pbkdf2 v1.0.0 // indirect + github.com/xdg-go/scram v1.1.2 // indirect + github.com/xdg-go/stringprep v1.0.4 // indirect + github.com/youmark/pkcs8 v0.0.0-20240726163527-a2c0da244d78 // indirect + go.mongodb.org/mongo-driver v1.17.4 // indirect + golang.org/x/arch v0.18.0 // indirect + golang.org/x/crypto v0.39.0 // indirect + golang.org/x/net v0.41.0 // indirect + golang.org/x/sync v0.15.0 // indirect + golang.org/x/sys v0.33.0 // indirect + golang.org/x/text v0.26.0 // indirect + google.golang.org/protobuf v1.36.6 // indirect + gopkg.in/yaml.v3 v3.0.1 // indirect +) diff --git a/backend/go.sum b/backend/go.sum new file mode 100644 index 0000000..58312aa --- /dev/null +++ b/backend/go.sum @@ -0,0 +1,123 @@ +github.com/bytedance/sonic v1.13.3 h1:MS8gmaH16Gtirygw7jV91pDCN33NyMrPbN7qiYhEsF0= +github.com/bytedance/sonic v1.13.3/go.mod h1:o68xyaF9u2gvVBuGHPlUVCy+ZfmNNO5ETf1+KgkJhz4= +github.com/bytedance/sonic/loader v0.1.1/go.mod h1:ncP89zfokxS5LZrJxl5z0UJcsk4M4yY2JpfqGeCtNLU= +github.com/bytedance/sonic/loader v0.2.4 h1:ZWCw4stuXUsn1/+zQDqeE7JKP+QO47tz7QCNan80NzY= +github.com/bytedance/sonic/loader v0.2.4/go.mod h1:N8A3vUdtUebEY2/VQC0MyhYeKUFosQU6FxH2JmUe6VI= +github.com/cloudwego/base64x v0.1.5 h1:XPciSp1xaq2VCSt6lF0phncD4koWyULpl5bUxbfCyP4= +github.com/cloudwego/base64x v0.1.5/go.mod h1:0zlkT4Wn5C6NdauXdJRhSKRlJvmclQ1hhJgA0rcu/8w= +github.com/cloudwego/iasm v0.2.0 h1:1KNIy1I1H9hNNFEEH3DVnI4UujN+1zjpuk6gwHLTssg= +github.com/cloudwego/iasm v0.2.0/go.mod h1:8rXZaNYT2n95jn+zTI1sDr+IgcD2GVs0nlbbQPiEFhY= +github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= 
+github.com/gabriel-vasile/mimetype v1.4.9 h1:5k+WDwEsD9eTLL8Tz3L0VnmVh9QxGjRmjBvAG7U/oYY= +github.com/gabriel-vasile/mimetype v1.4.9/go.mod h1:WnSQhFKJuBlRyLiKohA/2DtIlPFAbguNaG7QCHcyGok= +github.com/gin-contrib/sse v1.1.0 h1:n0w2GMuUpWDVp7qSpvze6fAu9iRxJY4Hmj6AmBOU05w= +github.com/gin-contrib/sse v1.1.0/go.mod h1:hxRZ5gVpWMT7Z0B0gSNYqqsSCNIJMjzvm6fqCz9vjwM= +github.com/gin-gonic/gin v1.10.1 h1:T0ujvqyCSqRopADpgPgiTT63DUQVSfojyME59Ei63pQ= +github.com/gin-gonic/gin v1.10.1/go.mod h1:4PMNQiOhvDRa013RKVbsiNwoyezlm2rm0uX/T7kzp5Y= +github.com/go-playground/locales v0.14.1 h1:EWaQ/wswjilfKLTECiXz7Rh+3BjFhfDFKv/oXslEjJA= +github.com/go-playground/locales v0.14.1/go.mod h1:hxrqLVvrK65+Rwrd5Fc6F2O76J/NuW9t0sjnWqG1slY= +github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJnYK9S473LQFuzCbDbfSFY= +github.com/go-playground/universal-translator v0.18.1/go.mod h1:xekY+UJKNuX9WP91TpwSH2VMlDf28Uj24BCp08ZFTUY= +github.com/go-playground/validator/v10 v10.26.0 h1:SP05Nqhjcvz81uJaRfEV0YBSSSGMc/iMaVtFbr3Sw2k= +github.com/go-playground/validator/v10 v10.26.0/go.mod h1:I5QpIEbmr8On7W0TktmJAumgzX4CA1XNl4ZmDuVHKKo= +github.com/goccy/go-json v0.10.5 h1:Fq85nIqj+gXn/S5ahsiTlK3TmC85qgirsdTP/+DeaC4= +github.com/goccy/go-json v0.10.5/go.mod h1:oq7eo15ShAhp70Anwd5lgX2pLfOS3QCiwU/PULtXL6M= +github.com/golang-jwt/jwt/v4 v4.5.2 h1:YtQM7lnr8iZ+j5q71MGKkNw9Mn7AjHM68uc9g5fXeUI= +github.com/golang-jwt/jwt/v4 v4.5.2/go.mod h1:m21LjoU+eqJr34lmDMbreY2eSTRJ1cv77w39/MY0Ch0= +github.com/golang/snappy v1.0.0 h1:Oy607GVXHs7RtbggtPBnr2RmDArIsAefDwvrdWvRhGs= +github.com/golang/snappy v1.0.0/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= +github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= +github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= +github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/joho/godotenv v1.5.1 h1:7eLL/+HRGLY0ldzfGMeQkb7vMd0as4CfYvUVzLqw0N0= 
+github.com/joho/godotenv v1.5.1/go.mod h1:f4LDr5Voq0i2e/R5DDNOoa2zzDfwtkZa6DnEwAbqwq4= +github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM= +github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo= +github.com/klauspost/compress v1.18.0 h1:c/Cqfb0r+Yi+JtIEq73FWXVkRonBlf0CRNYc8Zttxdo= +github.com/klauspost/compress v1.18.0/go.mod h1:2Pp+KzxcywXVXMr50+X0Q/Lsb43OQHYWRCY2AiWywWQ= +github.com/klauspost/cpuid/v2 v2.0.9/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg= +github.com/klauspost/cpuid/v2 v2.2.10 h1:tBs3QSyvjDyFTq3uoc/9xFpCuOsJQFNPiAhYdw2skhE= +github.com/klauspost/cpuid/v2 v2.2.10/go.mod h1:hqwkgyIinND0mEev00jJYCxPNVRVXFQeu1XKlok6oO0= +github.com/knz/go-libedit v1.10.1/go.mod h1:MZTVkCWyz0oBc7JOWP3wNAzd002ZbM/5hgShxwh4x8M= +github.com/leodido/go-urn v1.4.0 h1:WT9HwE9SGECu3lg4d/dIA+jxlljEa1/ffXKmRjqdmIQ= +github.com/leodido/go-urn v1.4.0/go.mod h1:bvxc+MVxLKB4z00jd1z+Dvzr47oO32F/QSNjSBOlFxI= +github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY= +github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= +github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= +github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg= +github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= +github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M= +github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk= +github.com/montanaflynn/stats v0.7.1 h1:etflOAAHORrCC44V+aR6Ftzort912ZU+YLiSTuV8eaE= +github.com/montanaflynn/stats v0.7.1/go.mod h1:etXPPgVO6n31NxCd9KQUMvCM+ve0ruNzt6R8Bnaayow= +github.com/pelletier/go-toml/v2 v2.2.4 h1:mye9XuhQ6gvn5h28+VilKrrPoQVanw5PMw/TB0t5Ec4= +github.com/pelletier/go-toml/v2 
v2.2.4/go.mod h1:2gIqNv+qfxSVS7cM2xJQKtLSTLUE9V8t9Stt+h56mCY= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= +github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo= +github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= +github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= +github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= +github.com/twitchyliquid64/golang-asm v0.15.1 h1:SU5vSMR7hnwNxj24w34ZyCi/FmDZTkS4MhqMhdFk5YI= +github.com/twitchyliquid64/golang-asm v0.15.1/go.mod h1:a1lVb/DtPvCB8fslRZhAngC2+aY1QWCk3Cedj/Gdt08= +github.com/ugorji/go/codec v1.2.14 h1:yOQvXCBc3Ij46LRkRoh4Yd5qK6LVOgi0bYOXfb7ifjw= +github.com/ugorji/go/codec v1.2.14/go.mod h1:UNopzCgEMSXjBc6AOMqYvWC1ktqTAfzJZUZgYf6w6lg= +github.com/xdg-go/pbkdf2 v1.0.0 h1:Su7DPu48wXMwC3bs7MCNG+z4FhcyEuz5dlvchbq0B0c= +github.com/xdg-go/pbkdf2 v1.0.0/go.mod h1:jrpuAogTd400dnrH08LKmI/xc1MbPOebTwRqcT5RDeI= +github.com/xdg-go/scram v1.1.2 h1:FHX5I5B4i4hKRVRBCFRxq1iQRej7WO3hhBuJf+UUySY= +github.com/xdg-go/scram v1.1.2/go.mod h1:RT/sEzTbU5y00aCK8UOx6R7YryM0iF1N2MOmC3kKLN4= +github.com/xdg-go/stringprep v1.0.4 h1:XLI/Ng3O1Atzq0oBs3TWm+5ZVgkq2aqdlvP9JtoZ6c8= +github.com/xdg-go/stringprep v1.0.4/go.mod h1:mPGuuIYwz7CmR2bT9j4GbQqutWS1zV24gijq1dTyGkM= +github.com/youmark/pkcs8 v0.0.0-20240726163527-a2c0da244d78 h1:ilQV1hzziu+LLM3zUTJ0trRztfwgjqKnBWNtSRkbmwM= +github.com/youmark/pkcs8 v0.0.0-20240726163527-a2c0da244d78/go.mod h1:aL8wCCfTfSfmXjznFBSZNN13rSJjlIOI1fUNAtF7rmI= 
+github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= +go.mongodb.org/mongo-driver v1.17.4 h1:jUorfmVzljjr0FLzYQsGP8cgN/qzzxlY9Vh0C9KFXVw= +go.mongodb.org/mongo-driver v1.17.4/go.mod h1:Hy04i7O2kC4RS06ZrhPRqj/u4DTYkFDAAccj+rVKqgQ= +golang.org/x/arch v0.18.0 h1:WN9poc33zL4AzGxqf8VtpKUnGvMi8O9lhNyBMF/85qc= +golang.org/x/arch v0.18.0/go.mod h1:bdwinDaKcfZUGpH09BB7ZmOfhalA8lQdzl62l8gGWsk= +golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= +golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= +golang.org/x/crypto v0.39.0 h1:SHs+kF4LP+f+p14esP5jAoDpHU8Gu/v9lFRK6IT5imM= +golang.org/x/crypto v0.39.0/go.mod h1:L+Xg3Wf6HoL4Bn4238Z6ft6KfEpN0tJGo53AAPC632U= +golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= +golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= +golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= +golang.org/x/net v0.41.0 h1:vBTly1HeNPEn3wtREYfy4GZ/NECgw2Cnl+nK6Nz3uvw= +golang.org/x/net v0.41.0/go.mod h1:B/K4NNqkfmg07DQYrbwvSluqCJOOXwUjeb/5lOisjbA= +golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.15.0 h1:KWH3jNZsfyT6xfAfKiz6MRNmd46ByHDYaZ7KSkCtdW8= +golang.org/x/sync v0.15.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA= +golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= 
+golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.33.0 h1:q3i8TbbEz+JRD9ywIRlyRAQbM0qF7hu24q3teo2hbuw= +golang.org/x/sys v0.33.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k= +golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= +golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= +golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= +golang.org/x/text v0.3.8/go.mod h1:E6s5w1FMmriuDzIBO73fBruAKo1PCIq6d2Q6DHfQ8WQ= +golang.org/x/text v0.26.0 h1:P42AVeLghgTYr4+xUnTRKDMqpar+PtX7KWuNQL21L8M= +golang.org/x/text v0.26.0/go.mod h1:QK15LZJUUQVJxhz7wXgxSy/CJaTFjd0G+YLonydOVQA= +golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= +golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +google.golang.org/protobuf v1.36.6 h1:z1NpPI8ku2WgiWnf+t9wTPsn6eP1L7ksHUlkfLvd9xY= +google.golang.org/protobuf v1.36.6/go.mod h1:jduwjTPXsFjZGTmRluh+L6NjiWu7pchiJ2/5YcXBHnY= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/yaml.v3 
v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= +gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +nullprogram.com/x/optparse v1.0.0/go.mod h1:KdyPE+Igbe0jQUrVfMqDMeJQIJZEuyV7pjYmp6pbG50= +rsc.io/pdf v0.1.1/go.mod h1:n8OzWcQ6Sp37PL01nO98y4iUCRdTGarVfzxY20ICaU4= diff --git a/backend/handlers/auth.go b/backend/handlers/auth.go new file mode 100644 index 0000000..cb1d7b5 --- /dev/null +++ b/backend/handlers/auth.go @@ -0,0 +1,301 @@ +package handlers + +import ( + "bytes" + "context" + "encoding/json" + "fmt" + "io" + "log" + "net/http" + "os" + "time" + + "github.com/gin-gonic/gin" + "github.com/yourusername/backend/config" + "github.com/yourusername/backend/models" + "github.com/yourusername/backend/utils" + "go.mongodb.org/mongo-driver/bson" + "go.mongodb.org/mongo-driver/bson/primitive" + "go.mongodb.org/mongo-driver/mongo" +) + +func GitHubSignUp(c *gin.Context) { + var authRequest models.GitHubAuthRequest + if err := c.ShouldBindJSON(&authRequest); err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + return + } + + tokenResponse, err := exchangeCodeForToken(authRequest.Code) + if err != nil { + c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to exchange code for token: " + err.Error()}) + return + } + + githubUser, err := getGitHubUser(tokenResponse.AccessToken) + if err != nil { + c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to get GitHub user: " + err.Error()}) + return + } + + collection := config.GetCollection("users") + var existingUser models.User + + err = collection.FindOne(context.Background(), bson.M{"github_id": githubUser.ID}).Decode(&existingUser) + if err != nil && err != mongo.ErrNoDocuments { + c.JSON(http.StatusInternalServerError, gin.H{"error": "Database error: " + err.Error()}) + return + } + + if err == nil { + c.JSON(http.StatusConflict, 
gin.H{"error": "User already exists. Please sign in instead."}) + return + } + + now := time.Now() + newUser := models.User{ + ID: primitive.NewObjectID(), + GitHubID: githubUser.ID, + Username: githubUser.Login, + Email: githubUser.Email, + Name: githubUser.Name, + AvatarURL: githubUser.AvatarURL, + AccessToken: tokenResponse.AccessToken, + RefreshToken: tokenResponse.RefreshToken, + CreatedAt: now, + UpdatedAt: now, + } + + _, err = collection.InsertOne(context.Background(), newUser) + if err != nil { + c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to create user: " + err.Error()}) + return + } + + token, err := utils.GenerateToken(newUser.ID) + if err != nil { + c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to generate token: " + err.Error()}) + return + } + + newUser.AccessToken = "" + newUser.RefreshToken = "" + + c.JSON(http.StatusCreated, models.AuthResponse{ + Token: token, + User: newUser, + }) +} + +func GitHubSignIn(c *gin.Context) { + var authRequest models.GitHubAuthRequest + if err := c.ShouldBindJSON(&authRequest); err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + return + } + tokenResponse, err := exchangeCodeForToken(authRequest.Code) + if err != nil { + c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to exchange code for token: " + err.Error()}) + return + } + githubUser, err := getGitHubUser(tokenResponse.AccessToken) + if err != nil { + c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to get GitHub user: " + err.Error()}) + return + } + + collection := config.GetCollection("users") + var user models.User + + err = collection.FindOne(context.Background(), bson.M{"github_id": githubUser.ID}).Decode(&user) + if err != nil { + if err == mongo.ErrNoDocuments { + c.JSON(http.StatusNotFound, gin.H{"error": "User not found. 
Please sign up instead."}) + } else { + c.JSON(http.StatusInternalServerError, gin.H{"error": "Database error: " + err.Error()}) + } + return + } + _, err = collection.UpdateOne( + context.Background(), + bson.M{"_id": user.ID}, + bson.M{ + "$set": bson.M{ + "access_token": tokenResponse.AccessToken, + "refresh_token": tokenResponse.RefreshToken, + "updated_at": time.Now(), + }, + }, + ) + if err != nil { + c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to update user: " + err.Error()}) + return + } + + token, err := utils.GenerateToken(user.ID) + if err != nil { + c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to generate token: " + err.Error()}) + return + } + + user.AccessToken = "" + user.RefreshToken = "" + + c.JSON(http.StatusOK, models.AuthResponse{ + Token: token, + User: user, + }) +} + +func exchangeCodeForToken(code string) (*models.GitHubTokenResponse, error) { + clientID := os.Getenv("GITHUB_CLIENT_ID") + clientSecret := os.Getenv("GITHUB_CLIENT_SECRET") + + if clientID == "" || clientSecret == "" { + return nil, fmt.Errorf("GITHUB_CLIENT_ID and GITHUB_CLIENT_SECRET must be set in environment variables") + } + + requestBody, err := json.Marshal(map[string]string{ + "client_id": clientID, + "client_secret": clientSecret, + "code": code, + }) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", "https://github.com/login/oauth/access_token", bytes.NewBuffer(requestBody)) + if err != nil { + return nil, err + } + + req.Header.Set("Content-Type", "application/json") + req.Header.Set("Accept", "application/json") + + client := &http.Client{} + resp, err := client.Do(req) + if err != nil { + return nil, err + } + defer resp.Body.Close() + + body, err := io.ReadAll(resp.Body) + if err != nil { + return nil, err + } + + if resp.StatusCode != http.StatusOK { + return nil, fmt.Errorf("GitHub OAuth endpoint returned status code %d: %s", resp.StatusCode, string(body)) + } + + var tokenResponse 
models.GitHubTokenResponse + if err := json.Unmarshal(body, &tokenResponse); err != nil { + return nil, err + } + + if tokenResponse.AccessToken == "" { + return nil, fmt.Errorf("GitHub OAuth endpoint did not return an access token") + } + + return &tokenResponse, nil +} + +func getGitHubUser(accessToken string) (*models.GitHubUser, error) { + req, err := http.NewRequest("GET", "https://api.github.com/user", nil) + if err != nil { + return nil, err + } + + req.Header.Set("Authorization", "token "+accessToken) + req.Header.Set("Accept", "application/vnd.github.v3+json") + + client := &http.Client{} + resp, err := client.Do(req) + if err != nil { + return nil, err + } + defer resp.Body.Close() + + body, err := io.ReadAll(resp.Body) + if err != nil { + return nil, err + } + + if resp.StatusCode != http.StatusOK { + log.Printf("GitHub API returned status code %d: %s", resp.StatusCode, string(body)) + return nil, fmt.Errorf("GitHub API returned status code %d", resp.StatusCode) + } + + var user models.GitHubUser + if err := json.Unmarshal(body, &user); err != nil { + return nil, err + } + + if user.Email == "" { + email, err := getGitHubUserEmails(accessToken) + if err != nil { + log.Printf("Warning: Could not fetch user email: %v", err) + } else if email != "" { + user.Email = email + } + } + + return &user, nil +} + +func getGitHubUserEmails(accessToken string) (string, error) { + req, err := http.NewRequest("GET", "https://api.github.com/user/emails", nil) + if err != nil { + return "", err + } + + req.Header.Set("Authorization", "token "+accessToken) + req.Header.Set("Accept", "application/vnd.github.v3+json") + + client := &http.Client{} + resp, err := client.Do(req) + if err != nil { + return "", err + } + defer resp.Body.Close() + + body, err := io.ReadAll(resp.Body) + if err != nil { + return "", err + } + + if resp.StatusCode != http.StatusOK { + log.Printf("GitHub API (emails) returned status code %d: %s", resp.StatusCode, string(body)) + return "", 
fmt.Errorf("GitHub API returned status code %d", resp.StatusCode) + } + + type GitHubEmail struct { + Email string `json:"email"` + Primary bool `json:"primary"` + Verified bool `json:"verified"` + } + + var emails []GitHubEmail + if err := json.Unmarshal(body, &emails); err != nil { + return "", err + } + + for _, email := range emails { + if email.Primary && email.Verified { + return email.Email, nil + } + } + + for _, email := range emails { + if email.Verified { + return email.Email, nil + } + } + + if len(emails) > 0 { + return emails[0].Email, nil + } + + return "", fmt.Errorf("no email found for user") +} diff --git a/backend/handlers/contributions.go b/backend/handlers/contributions.go new file mode 100644 index 0000000..6a93a5a --- /dev/null +++ b/backend/handlers/contributions.go @@ -0,0 +1,44 @@ +package handlers + +import ( + "encoding/json" + "fmt" + "io" + "net/http" + + "github.com/gin-gonic/gin" +) + +func GetGitHubContributions(c *gin.Context) { + username := c.Query("username") + if username == "" { + username = c.GetString("github_username") + } + if username == "" { + c.JSON(http.StatusBadRequest, gin.H{"error": "GitHub username not found"}) + return + } + year := c.DefaultQuery("year", "last") + url := fmt.Sprintf("https://github-contributions-api.jogruber.de/v4/%s?y=%s", username, year) + + resp, err := http.Get(url) + if err != nil { + c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to fetch contributions"}) + return + } + defer resp.Body.Close() + + body, err := io.ReadAll(resp.Body) + if err != nil { + c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to read response"}) + return + } + + var data interface{} + if err := json.Unmarshal(body, &data); err != nil { + c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to parse response"}) + return + } + + c.JSON(http.StatusOK, data) +} diff --git a/backend/handlers/coverage.go b/backend/handlers/coverage.go new file mode 100644 index 0000000..81f58a7 --- 
/dev/null +++ b/backend/handlers/coverage.go @@ -0,0 +1,1376 @@ +package handlers + +import ( + "bufio" + "context" + "errors" + "fmt" + "log" + "net/http" + "os" + "os/exec" + "path/filepath" + "regexp" + "strconv" + "strings" + "sync" + "time" + "github.com/gin-gonic/gin" + "github.com/yourusername/backend/config" + "github.com/yourusername/backend/models" + "github.com/yourusername/backend/pythonutils" + "go.mongodb.org/mongo-driver/bson" + "go.mongodb.org/mongo-driver/bson/primitive" + "go.mongodb.org/mongo-driver/mongo" +) + +type CoverageRequest struct { + RepoURL string `json:"repo_url" binding:"required"` + Branch string `json:"branch"` +} + +type CoverageAsyncRequest struct { + RepoURL string `json:"repo_url" binding:"required"` + Branch string `json:"branch"` + Async bool `json:"async"` +} + +type FileCoverage struct { + File string `json:"file"` + Coverage float64 `json:"coverage"` +} + +type CoverageResponse struct { + TotalCoverage float64 `json:"total_coverage"` + Files []FileCoverage `json:"files"` + ID string `json:"id,omitempty"` + Repository string `json:"repository,omitempty"` + Branch string `json:"branch,omitempty"` + Timestamp string `json:"timestamp,omitempty"` + CommitHash string `json:"commit_hash,omitempty"` +} + +type JobStatus struct { + JobID string `json:"job_id"` + Status string `json:"status"` + CreatedAt time.Time `json:"created_at"` + UpdatedAt time.Time `json:"updated_at"` + ResultID string `json:"result_id,omitempty"` + Error string `json:"error,omitempty"` +} + +type FileStats struct { + TotalExecutableLines int + CoveredLines int +} + +var completedJobs = make(map[string]*JobStatus) +var jobsMutex sync.RWMutex + +// Checks if a job is complete +func isJobComplete(jobID string) (bool, *JobStatus) { + jobsMutex.RLock() + defer jobsMutex.RUnlock() + + if job, exists := completedJobs[jobID]; exists { + return true, job + } + return false, nil +} + +// Marks a job as complete and schedules cleanup +func markJobComplete(jobID string, 
status string, resultID string, err string) { + jobsMutex.Lock() + defer jobsMutex.Unlock() + + completedJobs[jobID] = &JobStatus{ + JobID: jobID, + Status: status, + CreatedAt: time.Now(), + UpdatedAt: time.Now(), + ResultID: resultID, + Error: err, + } + go func(id string) { + time.Sleep(30 * time.Minute) + jobsMutex.Lock() + delete(completedJobs, id) + jobsMutex.Unlock() + }(jobID) +} + +// Handles coverage scan request (sync/async) +func RunCoverageScan(c *gin.Context) { + var req CoverageAsyncRequest + if err := c.ShouldBindJSON(&req); err != nil { + log.Printf("ERROR: Invalid coverage scan request: %v", err) + c.JSON(http.StatusBadRequest, gin.H{"error": "Invalid request"}) + return + } + + log.Printf("INFO: Starting coverage scan for repo: %s, branch: %s", req.RepoURL, req.Branch) + + if req.Async { + log.Printf("INFO: Starting asynchronous coverage scan for large repo") + jobID := primitive.NewObjectID().Hex() + db, err := config.ConnectDB() + if err != nil { + log.Printf("ERROR: Failed to connect to database: %v", err) + c.JSON(http.StatusInternalServerError, gin.H{"error": "Database connection failed"}) + return + } + + collection := db.Collection("coverage_jobs") + ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second) + defer cancel() + + jobDoc := bson.M{ + "job_id": jobID, + "repository": req.RepoURL, + "branch": req.Branch, + "status": "in_progress", + "created_at": time.Now(), + "updated_at": time.Now(), + } + + _, err = collection.InsertOne(ctx, jobDoc) + if err != nil { + log.Printf("ERROR: Failed to save job to database: %v", err) + c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to create job"}) + return + } + go func() { + defer func() { + if r := recover(); r != nil { + log.Printf("ERROR: Panic in async coverage scan: %v", r) + updateJobStatus(jobID, "failed", "", "Internal server error") + } + }() + + coverageReq := CoverageRequest{ + RepoURL: req.RepoURL, + Branch: req.Branch, + } + resp, err := 
scanCoverage(coverageReq, true) + + if err != nil { + log.Printf("ERROR: Async coverage scan failed: %v", err) + updateJobStatus(jobID, "failed", "", err.Error()) + return + } + + log.Printf("INFO: Async coverage scan completed successfully for job %s", jobID) + updateJobStatus(jobID, "completed", resp.ID, "") + }() + + c.JSON(http.StatusAccepted, gin.H{"job_id": jobID, "status": "in_progress"}) + return + } + coverageReq := CoverageRequest{ + RepoURL: req.RepoURL, + Branch: req.Branch, + } + resp, err := scanCoverage(coverageReq, false) + if err != nil { + log.Printf("ERROR: Coverage scan failed: %v", err) + c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()}) + return + } + c.JSON(http.StatusOK, resp) +} + +func updateJobStatus(jobID, status, resultID, errorMsg string) { + db, err := config.ConnectDB() + if err != nil { + log.Printf("ERROR: Failed to connect to database to update job status: %v", err) + return + } + + collection := db.Collection("coverage_jobs") + ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second) + defer cancel() + + update := bson.M{ + "$set": bson.M{ + "status": status, + "updated_at": time.Now(), + }, + } + + if resultID != "" { + update["$set"].(bson.M)["result_id"] = resultID + } + + if errorMsg != "" { + update["$set"].(bson.M)["error"] = errorMsg + } + + _, err = collection.UpdateOne(ctx, bson.M{"job_id": jobID}, update) + if err != nil { + log.Printf("ERROR: Failed to update job status in database: %v", err) + } else { + log.Printf("INFO: Updated job %s status to %s", jobID, status) + if status == "completed" || status == "failed" { + markJobComplete(jobID, status, resultID, errorMsg) + } + } +} + +// Checks if a file exists and is not a directory +func fileExists(path string) bool { + info, err := os.Stat(path) + return err == nil && !info.IsDir() +} + +// Checks if directory contains Go files (excluding test files) +func hasGoFiles(dir string) bool { + files, err := os.ReadDir(dir) + if err != nil { 
+ return false + } + for _, file := range files { + if !file.IsDir() && strings.HasSuffix(file.Name(), ".go") { + if !strings.HasSuffix(file.Name(), "_test.go") { + return true + } + } + } + return false +} + +// Detects Go modules and packages in a directory +func detectGoStructure(dir string, logPrefix string) ([]string, error) { + log.Printf("INFO: %s Fast Go project structure analysis", logPrefix) + + // Check root first for go.mod - most common case + if fileExists(filepath.Join(dir, "go.mod")) { + log.Printf("INFO: %s Found go.mod in root, using single module approach", logPrefix) + return []string{dir}, nil + } + + // Quick scan for Go files in root + if hasGoFiles(dir) { + log.Printf("INFO: %s Found Go files in root without go.mod", logPrefix) + return []string{dir}, nil + } + + // Only scan subdirectories if root doesn't have Go code + var goModules []string + var goPackages []string + maxDepth := 3 // Limit search depth for performance + + err := filepath.Walk(dir, func(path string, info os.FileInfo, err error) error { + if err != nil { + return err + } + + // Calculate depth and skip if too deep + relPath, _ := filepath.Rel(dir, path) + depth := strings.Count(relPath, string(filepath.Separator)) + if depth > maxDepth { + return filepath.SkipDir + } + + if info.IsDir() { + name := info.Name() + // Skip common non-Go directories early + if strings.HasPrefix(name, ".") || + name == "vendor" || + name == "node_modules" || + name == "build" || + name == "dist" || + name == "target" || + name == "docs" || + name == "examples" { + return filepath.SkipDir + } + } + + if info.Name() == "go.mod" { + moduleDir := filepath.Dir(path) + goModules = append(goModules, moduleDir) + log.Printf("INFO: %s Found Go module at: %s", logPrefix, moduleDir) + } + + if info.IsDir() && path != dir && hasGoFiles(path) { + goPackages = append(goPackages, path) + } + + return nil + }) + + if err != nil { + log.Printf("WARNING: %s Error walking directory: %v", logPrefix, err) + } + + 
// Prioritize modules over packages + if len(goModules) > 0 { + return goModules, nil + } + + if len(goPackages) > 0 { + return goPackages, nil + } + + return nil, errors.New("no Go code found") +} + + +func runGoModuleCoverage(dir string, logPrefix string) (float64, []FileCoverage, error) { + log.Printf("INFO: %s Running optimized Go coverage in: %s", logPrefix, dir) + + hasGoMod := fileExists(filepath.Join(dir, "go.mod")) + coverageFile := filepath.Join(dir, "coverage.out") + os.Remove(coverageFile) // Clean up any existing file + + // Create context with timeout to prevent hanging + ctx, cancel := context.WithTimeout(context.Background(), 3*time.Minute) // Reduced timeout + defer cancel() + + var cmd *exec.Cmd + + // Use the most likely to succeed command first based on project structure + if hasGoMod { + // For modules, start with the most common successful pattern + cmd = exec.CommandContext(ctx, "go", "test", "-coverprofile=coverage.out", "-covermode=count", "./...") + } else { + cmd = exec.CommandContext(ctx, "go", "test", "-coverprofile=coverage.out", "-covermode=count", ".") + } + + cmd.Dir = dir + cmd.Env = append(os.Environ(), + "GO111MODULE=on", + "CGO_ENABLED=0", + "GOCACHE="+filepath.Join(os.TempDir(), "gocache"), // Use temp cache to avoid conflicts + ) + + output, err := cmd.CombinedOutput() + + // If main command fails, try one quick fallback + if err != nil { + log.Printf("WARNING: %s Primary coverage command failed, trying fallback: %v", logPrefix, err) + + // Single fallback attempt with set mode (faster) + var fallbackCmd *exec.Cmd + if hasGoMod { + fallbackCmd = exec.CommandContext(ctx, "go", "test", "-coverprofile=coverage.out", "-covermode=set", "./...") + } else { + fallbackCmd = exec.CommandContext(ctx, "go", "test", "-coverprofile=coverage.out", "-covermode=set", ".") + } + + fallbackCmd.Dir = dir + fallbackCmd.Env = cmd.Env + + output, err = fallbackCmd.CombinedOutput() + if err != nil { + log.Printf("WARNING: %s Fallback also failed: 
%v", logPrefix, err) + } + } + + // Check if coverage file was created + if fileExists(coverageFile) { + log.Printf("INFO: %s Coverage file created, parsing results", logPrefix) + + // Try go tool cover first (faster and more reliable) + if coverageData, parseErr := parseWithGoToolCover(coverageFile, dir); parseErr == nil && coverageData.TotalCoverage >= 0 { + log.Printf("INFO: %s Successfully parsed coverage using go tool cover: %.2f%%", logPrefix, coverageData.TotalCoverage) + os.Remove(coverageFile) + return coverageData.TotalCoverage, coverageData.Files, nil + } + + // Fallback to manual parsing + coverageData, parseErr := parseCoverageFile(coverageFile) + if parseErr != nil { + log.Printf("ERROR: %s Failed to parse coverage file: %v", logPrefix, parseErr) + os.Remove(coverageFile) + return 0.0, []FileCoverage{}, parseErr + } + + os.Remove(coverageFile) + log.Printf("INFO: %s Successfully parsed coverage manually: %.2f%%", logPrefix, coverageData.TotalCoverage) + return coverageData.TotalCoverage, coverageData.Files, nil + } + + // Quick check for coverage in output + if strings.Contains(string(output), "coverage:") { + coverage := parseSimpleCoverageOutput(string(output)) + if coverage > 0 { + log.Printf("INFO: %s Extracted coverage from command output: %.2f%%", logPrefix, coverage) + return coverage, []FileCoverage{}, nil + } + } + + return 0.0, []FileCoverage{}, err +} + +func processGoDirectoriesInParallel(goDirectories []string, tmpDir string, logPrefix string) (CoverageResponse, bool) { + log.Printf("INFO: %s Processing %d Go directories in parallel", logPrefix, len(goDirectories)) + + type CoverageResult struct { + Coverage float64 + Files []FileCoverage + Error error + Dir string + } + + // Limit concurrency to prevent resource exhaustion + maxWorkers := min(len(goDirectories), 3) // Conservative limit + resultsChan := make(chan CoverageResult, len(goDirectories)) + semaphore := make(chan struct{}, maxWorkers) + + var wg sync.WaitGroup + + for _, dir := 
range goDirectories { + wg.Add(1) + go func(directory string) { + defer wg.Done() + semaphore <- struct{}{} // Acquire + defer func() { <-semaphore }() // Release + + relPath, _ := filepath.Rel(tmpDir, directory) + if relPath == "." { + relPath = "root" + } + + coverage, files, err := runGoModuleCoverage(directory, logPrefix) + resultsChan <- CoverageResult{ + Coverage: coverage, + Files: files, + Error: err, + Dir: relPath, + } + }(dir) + } + + // Close channel when all workers complete + go func() { + wg.Wait() + close(resultsChan) + }() + + // Collect results + var allFiles []FileCoverage + var totalCoverage float64 + var validResults int + + for result := range resultsChan { + if result.Error != nil { + log.Printf("WARNING: %s Failed to get coverage for %s: %v", logPrefix, result.Dir, result.Error) + continue + } + + if result.Coverage <= 0 { + continue + } + + log.Printf("INFO: %s Got %.2f%% coverage from %s", logPrefix, result.Coverage, result.Dir) + totalCoverage += result.Coverage + validResults++ + + // Prefix file paths with directory name if not root + if result.Dir != "root" { + for i := range result.Files { + result.Files[i].File = filepath.Join(result.Dir, result.Files[i].File) + } + } + allFiles = append(allFiles, result.Files...) 
+ } + + if validResults > 0 { + finalCoverage := totalCoverage / float64(validResults) + log.Printf("INFO: %s Calculated average coverage: %.2f%% from %d directories", + logPrefix, finalCoverage, validResults) + + return CoverageResponse{ + TotalCoverage: finalCoverage, + Files: allFiles, + }, true + } + + return CoverageResponse{}, false +} + + +// Detects project type based on files present +func parseSimpleCoverageOutput(output string) float64 { + patterns := []string{ + `coverage:\s*([0-9]+(?:\.[0-9]+)?)%`, + `Total coverage:\s*([0-9]+(?:\.[0-9]+)?)%`, + `TOTAL.*?([0-9]+(?:\.[0-9]+)?)%`, + `([0-9]+(?:\.[0-9]+)?)%\s+of\s+statements`, + } + + for _, pattern := range patterns { + re := regexp.MustCompile(pattern) + matches := re.FindStringSubmatch(output) + if len(matches) >= 2 { + if val, err := strconv.ParseFloat(matches[1], 64); err == nil { + log.Printf("INFO: Extracted coverage using pattern '%s': %.2f%%", pattern, val) + return val + } + } + } + + log.Printf("WARNING: No coverage percentage found in output") + return 0.0 +} + +// Fallback: runs 'go test -cover' and parses output +func fallbackCoverage(dir string) (CoverageResponse, error) { + cmd := exec.Command("go", "test", "-cover") + cmd.Dir = dir + out, err := cmd.CombinedOutput() + if err != nil { + return CoverageResponse{}, err + } + re := regexp.MustCompile(`coverage: ([0-9]+\.[0-9]+)% of statements`) + matches := re.FindStringSubmatch(string(out)) + if len(matches) < 2 { + return CoverageResponse{}, errors.New("no coverage info") + } + val, _ := strconv.ParseFloat(matches[1], 64) + return CoverageResponse{TotalCoverage: val, Files: []FileCoverage{}}, nil +} + +// Fallback: runs 'gocov test ./...' 
and parses output +func fallbackGocov(dir string) (CoverageResponse, error) { + cmd := exec.Command("gocov", "test", "./...") + cmd.Dir = dir + out, err := cmd.CombinedOutput() + if err != nil { + return CoverageResponse{}, err + } + re := regexp.MustCompile(`"percent":\s*([0-9]+\.?[0-9]*)`) + match := re.FindStringSubmatch(string(out)) + if len(match) < 2 { + return CoverageResponse{}, errors.New("no gocov coverage info") + } + val, _ := strconv.ParseFloat(match[1], 64) + return CoverageResponse{TotalCoverage: val, Files: []FileCoverage{}}, nil +} + +// Fallback: runs 'go tool cover -func=coverage.out' and parses output +func fallbackGocover(dir string) (CoverageResponse, error) { + covFile := filepath.Join(dir, "coverage.out") + if !fileExists(covFile) { + minimalContent := "mode: set\n" + if err := os.WriteFile(covFile, []byte(minimalContent), 0644); err != nil { + return CoverageResponse{TotalCoverage: 0.0, Files: []FileCoverage{}}, nil + } + } + cmd := exec.Command("go", "tool", "cover", "-func=coverage.out") + cmd.Dir = dir + out, err := cmd.CombinedOutput() + if err != nil { + return CoverageResponse{TotalCoverage: 0.0, Files: []FileCoverage{}}, nil + } + scanner := bufio.NewScanner(strings.NewReader(string(out))) + for scanner.Scan() { + line := scanner.Text() + if strings.HasPrefix(line, "total:") { + parts := strings.Fields(line) + if len(parts) >= 3 && strings.HasSuffix(parts[2], "%") { + valStr := strings.TrimSuffix(parts[2], "%") + if val, err := strconv.ParseFloat(valStr, 64); err == nil { + return CoverageResponse{TotalCoverage: val, Files: []FileCoverage{}}, nil + } + } + } + } + return CoverageResponse{TotalCoverage: 0.0, Files: []FileCoverage{}}, nil +} + +func parseCoverageFile(path string) (CoverageResponse, error) { + file, err := os.Open(path) + if err != nil { + return CoverageResponse{}, fmt.Errorf("failed to open coverage file: %v", err) + } + defer file.Close() + + scanner := bufio.NewScanner(file) + + if !scanner.Scan() { + return 
CoverageResponse{}, errors.New("empty coverage file") + } + + firstLine := scanner.Text() + if !strings.HasPrefix(firstLine, "mode:") { + return CoverageResponse{}, errors.New("invalid coverage file format") + } + + log.Printf("INFO: Coverage file mode: %s", firstLine) + + totalExecutableLines := 0 + totalCoveredLines := 0 + fileData := make(map[string]*FileStats) + + lineCount := 0 + for scanner.Scan() { + line := strings.TrimSpace(scanner.Text()) + if line == "" { + continue + } + + lineCount++ + + parts := strings.Fields(line) + if len(parts) < 3 { + log.Printf("WARNING: Skipping malformed coverage line: %s", line) + continue + } + location := parts[0] + colonIndex := strings.Index(location, ":") + if colonIndex == -1 { + log.Printf("WARNING: No colon found in location: %s", location) + continue + } + filename := location[:colonIndex] + + numStmt, err1 := strconv.Atoi(parts[1]) + execCount, err2 := strconv.Atoi(parts[2]) + + if err1 != nil || err2 != nil { + log.Printf("WARNING: Failed to parse coverage line: %s (numStmt: %v, execCount: %v)", line, err1, err2) + continue + } + + if fileData[filename] == nil { + fileData[filename] = &FileStats{ + TotalExecutableLines: 0, + CoveredLines: 0, + } + } + fileStats := fileData[filename] + fileStats.TotalExecutableLines += numStmt + + if execCount > 0 { + fileStats.CoveredLines += numStmt + } + totalExecutableLines += numStmt + if execCount > 0 { + totalCoveredLines += numStmt + } + if lineCount <= 5 { + log.Printf("DEBUG: Line %d - File: %s, Statements: %d, ExecCount: %d, Covered: %v", + lineCount, filename, numStmt, execCount, execCount > 0) + } + } + + if err := scanner.Err(); err != nil { + return CoverageResponse{}, fmt.Errorf("error reading coverage file: %v", err) + } + + log.Printf("INFO: Parsed %d coverage lines, total executable lines: %d, covered lines: %d", + lineCount, totalExecutableLines, totalCoveredLines) + + totalCoverage := 0.0 + if totalExecutableLines > 0 { + totalCoverage = 
float64(totalCoveredLines) * 100.0 / float64(totalExecutableLines) + } + files := make([]FileCoverage, 0, len(fileData)) + for filename, stats := range fileData { + fileCoverage := 0.0 + if stats.TotalExecutableLines > 0 { + fileCoverage = float64(stats.CoveredLines) * 100.0 / float64(stats.TotalExecutableLines) + } + cleanFilename := cleanupFilename(filename) + + files = append(files, FileCoverage{ + File: cleanFilename, + Coverage: fileCoverage, + }) + if len(files) <= 5 { + log.Printf("DEBUG: File %s - Executable: %d, Covered: %d, Coverage: %.2f%%", + cleanFilename, stats.TotalExecutableLines, stats.CoveredLines, fileCoverage) + } + } + + log.Printf("INFO: Final calculated coverage: %.2f%% (%d covered out of %d executable lines)", + totalCoverage, totalCoveredLines, totalExecutableLines) + + return CoverageResponse{ + TotalCoverage: totalCoverage, + Files: files, + }, nil +} + + +// Parses coverage.out using 'go tool cover -func' +func parseWithGoToolCover(coverageFile string, dir string) (CoverageResponse, error) { + log.Printf("INFO: Using go tool cover to parse coverage file") + cmd := exec.Command("go", "tool", "cover", "-func", coverageFile) + cmd.Dir = dir + out, err := cmd.CombinedOutput() + if err != nil { + return CoverageResponse{}, fmt.Errorf("go tool cover failed: %v, output: %s", err, string(out)) + } + + output := string(out) + log.Printf("DEBUG: go tool cover output (first 500 chars):\n%s", + output[:min(len(output), 500)]) + lines := strings.Split(output, "\n") + fileData := make(map[string]*FileStats) + var totalCoverage float64 + + for _, line := range lines { + line = strings.TrimSpace(line) + if line == "" { + continue + } + + if strings.HasPrefix(line, "total:") { + fields := strings.Fields(line) + if len(fields) >= 3 { + + coverageStr := strings.TrimSuffix(fields[2], "%") + if coverage, err := strconv.ParseFloat(coverageStr, 64); err == nil { + totalCoverage = coverage + } + } + continue + } + parts := strings.Fields(line) + if len(parts) 
>= 3 { + fileFunc := parts[0] + coverageStr := strings.TrimSuffix(parts[2], "%") + + if coverage, err := strconv.ParseFloat(coverageStr, 64); err == nil { + if colonIndex := strings.Index(fileFunc, ":"); colonIndex != -1 { + filename := fileFunc[:colonIndex] + cleanFilename := cleanupFilename(filename) + if fileData[cleanFilename] == nil { + fileData[cleanFilename] = &FileStats{} + } + fileData[cleanFilename].CoveredLines += int(coverage) + fileData[cleanFilename].TotalExecutableLines += 100 + } + } + } + } + files := make([]FileCoverage, 0, len(fileData)) + for filename, stats := range fileData { + fileCoverage := 0.0 + if stats.TotalExecutableLines > 0 { + fileCoverage = float64(stats.CoveredLines) / float64(stats.TotalExecutableLines) * 100.0 + } + + files = append(files, FileCoverage{ + File: filename, + Coverage: fileCoverage, + }) + } + + log.Printf("INFO: go tool cover parsed %d files, total coverage: %.2f%%", len(files), totalCoverage) + + return CoverageResponse{ + TotalCoverage: totalCoverage, + Files: files, + }, nil +} + +func cleanupFilename(filename string) string { + if strings.Contains(filename, "/") { + parts := strings.Split(filename, "/") + if len(parts) > 3 { + return strings.Join(parts[len(parts)-3:], "/") + } + } + return filename +} + +// Checks if a directory contains any Go files +func dirContainsGoFiles(dir string) bool { + goFiles, err := filepath.Glob(filepath.Join(dir, "*.go")) + if err != nil { + log.Printf("ERROR: Failed to check for Go files in %s: %v", dir, err) + return false + } + return len(goFiles) > 0 +} + +// Finds all directories containing Go code +func findGoCodeDirs(baseDir string, logPrefix string) []string { + log.Printf("INFO: %s Searching for Go code in subdirectories", logPrefix) + var goDirs []string + if dirContainsGoFiles(baseDir) || fileExists(filepath.Join(baseDir, "go.mod")) { + goDirs = append(goDirs, baseDir) + log.Printf("INFO: %s Found Go code in root directory", logPrefix) + } + err := 
filepath.Walk(baseDir, func(path string, info os.FileInfo, err error) error { + if err != nil { + return err + } + + if info.IsDir() && info.Name() == ".git" { + return filepath.SkipDir + } + if path == baseDir { + return nil + } + + if info.IsDir() && + (dirContainsGoFiles(path) || fileExists(filepath.Join(path, "go.mod"))) { + if !strings.Contains(path, "/vendor/") && + !strings.Contains(path, "/build/") && + !strings.HasPrefix(filepath.Base(path), ".") { + goDirs = append(goDirs, path) + log.Printf("INFO: %s Found Go code in directory: %s", logPrefix, path) + } + } + return nil + }) + + if err != nil { + log.Printf("WARNING: %s Error while walking directories: %v", logPrefix, err) + } + + return goDirs +} + +// Main function to scan coverage for a repo (Go/Python/mixed) +func scanCoverage(req CoverageRequest, saveHistory bool) (CoverageResponse, error) { + logPrefix := fmt.Sprintf("[Repo: %s, Branch: %s]", req.RepoURL, req.Branch) + log.Printf("INFO: %s Starting optimized coverage scan", logPrefix) + + tmpDir, err := os.MkdirTemp("", "covscan") + if err != nil { + log.Printf("ERROR: %s Failed to create temp directory: %v", logPrefix, err) + return CoverageResponse{}, err + } + defer func() { + log.Printf("INFO: %s Cleaning up temp directory: %s", logPrefix, tmpDir) + os.RemoveAll(tmpDir) + }() + + // Git clone with optimizations + args := []string{"clone", "--depth", "1", "--single-branch"} + if req.Branch != "" { + args = append(args, "-b", req.Branch) + } + args = append(args, req.RepoURL, tmpDir) + + log.Printf("INFO: %s Running git clone command: git %s", logPrefix, strings.Join(args, " ")) + + // Add timeout for git clone + ctx, cancel := context.WithTimeout(context.Background(), 2*time.Minute) + defer cancel() + + clone := exec.CommandContext(ctx, "git", args...) 
+ if out, err := clone.CombinedOutput(); err != nil { + log.Printf("ERROR: %s Git clone failed: %v, output: %s", logPrefix, err, string(out)) + return CoverageResponse{}, errors.New("Git clone failed: " + string(out)) + } + log.Printf("INFO: %s Successfully cloned repository to %s", logPrefix, tmpDir) + + // Check for custom coverage script first + script := "" + cfgPath := filepath.Join(tmpDir, ".keploy.yaml") + if data, err := os.ReadFile(cfgPath); err == nil { + log.Printf("INFO: %s Found .keploy.yaml, parsing for coverage script", logPrefix) + for _, line := range strings.Split(string(data), "\n") { + line = strings.TrimSpace(line) + if strings.HasPrefix(line, "coverage:") { + script = strings.TrimSpace(strings.TrimPrefix(line, "coverage:")) + log.Printf("INFO: %s Found coverage script: %s", logPrefix, script) + break + } + } + } + + var resp CoverageResponse + var coverageFound bool + + // Determine project type quickly + projectType := detectProjectType(tmpDir, logPrefix) + + // Try custom script first if available + if !coverageFound && script != "" { + log.Printf("INFO: %s Running custom coverage script: %s", logPrefix, script) + ctx, cancel := context.WithTimeout(context.Background(), 3*time.Minute) + defer cancel() + + cmd := exec.CommandContext(ctx, "sh", "-c", script) + cmd.Dir = tmpDir + if out, err := cmd.CombinedOutput(); err != nil { + log.Printf("ERROR: %s Custom script execution failed: %v, output: %s", logPrefix, err, string(out)) + } else { + log.Printf("INFO: %s Custom script executed successfully", logPrefix) + coverageFile := filepath.Join(tmpDir, "coverage.out") + if fileExists(coverageFile) { + if coverageData, err := parseCoverageFile(coverageFile); err == nil { + resp = coverageData + coverageFound = true + } + } + } + } + + // Handle Python projects + if !coverageFound && projectType == "python" { + log.Printf("INFO: %s Running Python coverage (primary language)", logPrefix) + pythonResp, pythonErr := runPythonCoverage(tmpDir, logPrefix) 
+ if pythonErr == nil && pythonResp.TotalCoverage > 0 { + resp = pythonResp + coverageFound = true + log.Printf("INFO: %s Python coverage succeeded: %.2f%%", logPrefix, resp.TotalCoverage) + } else { + log.Printf("WARNING: %s Python coverage failed: %v", logPrefix, pythonErr) + } + } + + // Handle Go projects with optimizations + if !coverageFound { + log.Printf("INFO: %s Starting optimized Go coverage analysis", logPrefix) + goDirectories, err := detectGoStructure(tmpDir, logPrefix) + if err != nil { + if projectType == "python" { + log.Printf("INFO: %s No Go code found, falling back to Python estimation", logPrefix) + if pythonResp, pythonErr := estimatePythonCoverage(tmpDir, logPrefix); pythonErr == nil { + resp = pythonResp + coverageFound = true + } + } + + if !coverageFound { + log.Printf("ERROR: %s No code found in repository", logPrefix) + return CoverageResponse{}, errors.New("No code found in repository") + } + } else { + log.Printf("INFO: %s Found %d Go directories to scan", logPrefix, len(goDirectories)) + + // Use parallel processing for multiple directories + if len(goDirectories) > 1 { + if goResp, success := processGoDirectoriesInParallel(goDirectories, tmpDir, logPrefix); success { + resp = goResp + coverageFound = true + } + } else { + // Single directory - process normally + coverage, files, err := runGoModuleCoverage(goDirectories[0], logPrefix) + if err == nil && coverage > 0 { + resp = CoverageResponse{ + TotalCoverage: coverage, + Files: files, + } + coverageFound = true + } + } + + // Only try fallbacks if parallel processing failed + if !coverageFound { + log.Printf("WARNING: %s Optimized methods failed, trying single fallback", logPrefix) + // Try only one fallback method to save time + if fallbackResp, err := fallbackCoverage(goDirectories[0]); err == nil { + log.Printf("INFO: %s Fallback succeeded", logPrefix) + resp = fallbackResp + coverageFound = true + } + } + } + } + + // Final Python fallback for mixed projects + if !coverageFound 
{ + log.Printf("WARNING: %s No coverage found, trying final Python fallback", logPrefix) + if pythonutils.DetectPythonProject(tmpDir) { + if pythonResp, pythonErr := estimatePythonCoverage(tmpDir, logPrefix); pythonErr == nil { + resp = pythonResp + coverageFound = true + log.Printf("INFO: %s Python estimation fallback succeeded: %.2f%%", logPrefix, resp.TotalCoverage) + } + } + } + + if !coverageFound { + return CoverageResponse{}, errors.New("unable to calculate coverage for this repository") + } + + // Get commit hash + commitHash := "" + cmd := exec.Command("git", "rev-parse", "HEAD") + cmd.Dir = tmpDir + if out, err := cmd.Output(); err == nil { + commitHash = strings.TrimSpace(string(out)) + log.Printf("INFO: %s Got commit hash: %s", logPrefix, commitHash) + } + + // Save to database if requested + if saveHistory { + log.Printf("INFO: %s Saving coverage history to database", logPrefix) + db, err := config.ConnectDB() + if err == nil { + collection := db.Collection("coverage_history") + now := time.Now() + var files []models.FileCoverage + for _, f := range resp.Files { + files = append(files, models.FileCoverage{File: f.File, Coverage: f.Coverage}) + } + + history := models.CoverageHistory{ + ID: primitive.NewObjectID(), + Repository: req.RepoURL, + Branch: req.Branch, + TotalCoverage: resp.TotalCoverage, + Files: files, + Timestamp: now, + CommitHash: commitHash, + } + + ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second) + defer cancel() + + if result, err := collection.InsertOne(ctx, history); err == nil { + resp.ID = result.InsertedID.(primitive.ObjectID).Hex() + resp.Repository = req.RepoURL + resp.Branch = req.Branch + resp.Timestamp = history.Timestamp.Format(time.RFC3339) + resp.CommitHash = commitHash + log.Printf("INFO: %s Successfully saved coverage history", logPrefix) + } else { + log.Printf("WARNING: %s Failed to save coverage history: %v", logPrefix, err) + } + } + } + + log.Printf("INFO: %s Optimized coverage scan 
completed successfully. Total coverage: %.2f%%", + logPrefix, resp.TotalCoverage) + return resp, nil +} + +// Detects the primary project type based on structure and files +func detectProjectType(dir string, logPrefix string) string { + log.Printf("INFO: %s Analyzing project structure for primary language", logPrefix) + + if pythonutils.DetectPythonProject(dir) { + projectInfo := pythonutils.DetectPythonProjectInfo(dir, logPrefix) + + if projectInfo.Type == pythonutils.PoetryProject || + projectInfo.Type == pythonutils.PipenvProject || + projectInfo.Type == pythonutils.CondaProject { + log.Printf("INFO: %s Detected sophisticated Python project type: %v", logPrefix, projectInfo.Type) + return "python" + } + } + + goFileCount := 0 + pythonFileCount := 0 + goInRoot := false + pythonInRoot := false + goHasTests := false + pythonHasTests := false + + if files, err := os.ReadDir(dir); err == nil { + for _, file := range files { + if !file.IsDir() { + name := file.Name() + if strings.HasSuffix(name, ".go") { + goFileCount++ + goInRoot = true + if strings.HasSuffix(name, "_test.go") { + goHasTests = true + } + } else if strings.HasSuffix(name, ".py") { + pythonFileCount++ + pythonInRoot = true + if strings.Contains(name, "test") { + pythonHasTests = true + } + } + } + } + } + + hasGoMod := fileExists(filepath.Join(dir, "go.mod")) + hasGoSum := fileExists(filepath.Join(dir, "go.sum")) + hasPyProject := fileExists(filepath.Join(dir, "pyproject.toml")) + hasRequirements := fileExists(filepath.Join(dir, "requirements.txt")) + hasSetupPy := fileExists(filepath.Join(dir, "setup.py")) + hasPoetryLock := fileExists(filepath.Join(dir, "poetry.lock")) + hasPipfile := fileExists(filepath.Join(dir, "Pipfile")) + + + err := filepath.Walk(dir, func(path string, info os.FileInfo, err error) error { + if err != nil { + return err + } + if info.IsDir() { + name := strings.ToLower(info.Name()) + if strings.HasPrefix(name, ".") || + name == "vendor" || + name == "node_modules" || + name == 
"documentation" || + name == "docs" || + name == "examples" || + name == "scripts" { + return filepath.SkipDir + } + } + + if !info.IsDir() { + name := info.Name() + if strings.HasSuffix(name, ".go") { + goFileCount++ + if strings.HasSuffix(name, "_test.go") { + goHasTests = true + } + } else if strings.HasSuffix(name, ".py") { + pythonFileCount++ + if strings.Contains(name, "test") { + pythonHasTests = true + } + } + } + return nil + }) + if err != nil { + log.Printf("WARNING: %s Error walking directory: %v", logPrefix, err) + } + + log.Printf("INFO: %s Project analysis - Go files: %d (root: %t, tests: %t), Python files: %d (root: %t, tests: %t)", + logPrefix, goFileCount, goInRoot, goHasTests, pythonFileCount, pythonInRoot, pythonHasTests) + log.Printf("INFO: %s Key files - go.mod: %t, pyproject.toml: %t, requirements.txt: %t, poetry.lock: %t, Pipfile: %t", + logPrefix, hasGoMod, hasPyProject, hasRequirements, hasPoetryLock, hasPipfile) + + goScore := 0 + pythonScore := 0 + if goFileCount > 0 { + goScore += min(goFileCount/10+1, 5) + } + if pythonFileCount > 0 { + pythonScore += min(pythonFileCount/10+1, 5) + } + + if hasGoMod || hasGoSum { + goScore += 10 + } + if hasPyProject || hasSetupPy { + pythonScore += 10 + } + if hasPoetryLock { + pythonScore += 15 + } + if hasPipfile { + pythonScore += 12 + } + if hasRequirements { + pythonScore += 5 + } + if goInRoot { + goScore += 5 + } + if pythonInRoot { + pythonScore += 5 + } + if goHasTests { + goScore += 3 + } + if pythonHasTests { + pythonScore += 3 + } + totalFiles := goFileCount + pythonFileCount + if totalFiles > 0 { + goRatio := float64(goFileCount) / float64(totalFiles) + pythonRatio := float64(pythonFileCount) / float64(totalFiles) + + if goRatio > 0.7 { + goScore += 5 + } + if pythonRatio > 0.7 { + pythonScore += 5 + } + } + + log.Printf("INFO: %s Language scoring - Go: %d, Python: %d", logPrefix, goScore, pythonScore) + + + if goScore > pythonScore { + log.Printf("INFO: %s Primary language detected: Go", 
logPrefix) + return "go" + } else if pythonScore > goScore { + log.Printf("INFO: %s Primary language detected: Python", logPrefix) + return "python" + } else if goFileCount > 0 { + log.Printf("INFO: %s Tie-breaker: defaulting to Go", logPrefix) + return "go" + } else if pythonFileCount > 0 { + log.Printf("INFO: %s Tie-breaker: defaulting to Python", logPrefix) + return "python" + } + + log.Printf("INFO: %s No clear primary language detected", logPrefix) + return "unknown" +} + +// Returns minimum of two ints +func min(a, b int) int { + if a < b { + return a + } + return b +} + +func runPythonCoverage(dir string, logPrefix string) (CoverageResponse, error) { + log.Printf("INFO: %s Running enhanced Python coverage analysis", logPrefix) + + pythonResp, err := pythonutils.RunPythonCoverage(dir, logPrefix) + if err != nil { + log.Printf("ERROR: %s Enhanced Python coverage failed: %v", logPrefix, err) + return CoverageResponse{}, err + } + + var files []FileCoverage + for _, f := range pythonResp.Files { + files = append(files, FileCoverage{ + File: f.File, + Coverage: f.Coverage, + }) + } + + return CoverageResponse{ + TotalCoverage: pythonResp.TotalCoverage, + Files: files, + ID: pythonResp.ID, + Repository: pythonResp.Repository, + Branch: pythonResp.Branch, + Timestamp: pythonResp.Timestamp, + CommitHash: pythonResp.CommitHash, + }, nil +} + + +// Estimates Python coverage using pythonutils +func estimatePythonCoverage(dir string, logPrefix string) (CoverageResponse, error) { + log.Printf("INFO: %s Using enhanced Python coverage estimation", logPrefix) + pythonResp, err := pythonutils.EstimatePythonCoverage(dir, logPrefix) + if err != nil { + log.Printf("ERROR: %s Enhanced Python estimation failed: %v", logPrefix, err) + return CoverageResponse{}, err + } + + var files []FileCoverage + for _, f := range pythonResp.Files { + files = append(files, FileCoverage{ + File: f.File, + Coverage: f.Coverage, + }) + } + + return CoverageResponse{ + TotalCoverage: 
pythonResp.TotalCoverage, + Files: files, + }, nil +} + +// Starts periodic cleanup of old jobs +func CleanupOldJobs() { + log.Println("Starting job cleanup routine") + ticker := time.NewTicker(1 * time.Hour) + go func() { + for { + select { + case <-ticker.C: + performJobCleanup() + } + } + }() +} + +// Performs cleanup of old jobs in DB and memory +func performJobCleanup() { + db, err := config.ConnectDB() + if err != nil { + log.Printf("Failed to connect to database for job cleanup: %v", err) + return + } + collection := db.Collection("coverage_jobs") + ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second) + defer cancel() + threshold := time.Now().Add(-24 * time.Hour) + result, err := collection.DeleteMany( + ctx, + bson.M{ + "status": bson.M{"$in": []string{"completed", "failed"}}, + "updated_at": bson.M{"$lt": threshold}, + }, + ) + + if err != nil { + log.Printf("Failed to cleanup old jobs: %v", err) + } else if result.DeletedCount > 0 { + log.Printf("Cleaned up %d completed/failed jobs", result.DeletedCount) + } + + stuckThreshold := time.Now().Add(-2 * time.Hour) + + stuckResult, err := collection.UpdateMany( + ctx, + bson.M{ + "status": "in_progress", + "updated_at": bson.M{"$lt": stuckThreshold}, + }, + bson.M{ + "$set": bson.M{ + "status": "failed", + "error": "Job timed out", + "updated_at": time.Now(), + }, + }, + ) + + if err != nil { + log.Printf("Failed to mark stuck jobs as failed: %v", err) + } else if stuckResult.ModifiedCount > 0 { + log.Printf("Marked %d stuck jobs as failed", stuckResult.ModifiedCount) + } + cleanupInMemoryCache() +} + +// Cleans up in-memory job cache +func cleanupInMemoryCache() { + jobsMutex.Lock() + defer jobsMutex.Unlock() + threshold := time.Now().Add(-30 * time.Minute) + for id, job := range completedJobs { + if job.UpdatedAt.Before(threshold) { + delete(completedJobs, id) + } + } + + log.Printf("In-memory job cache size: %d", len(completedJobs)) +} + +func GetCoverageJobStatus(c *gin.Context) { + 
jobID := c.Param("job_id") + if isComplete, jobStatus := isJobComplete(jobID); isComplete { + log.Printf("Returning cached job status for %s: %s", jobID, jobStatus.Status) + c.JSON(http.StatusOK, jobStatus) + return + } + db, err := config.ConnectDB() + if err != nil { + log.Printf("Failed to connect to database: %v", err) + c.JSON(http.StatusInternalServerError, gin.H{"error": "Database connection failed"}) + return + } + + collection := db.Collection("coverage_jobs") + ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second) + defer cancel() + + var jobStatus JobStatus + err = collection.FindOne(ctx, bson.M{"job_id": jobID}).Decode(&jobStatus) + + if err != nil { + if err == mongo.ErrNoDocuments { + c.JSON(http.StatusNotFound, gin.H{"error": "Job not found"}) + } else { + c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to get job status"}) + } + return + } + if jobStatus.Status == "completed" || jobStatus.Status == "failed" { + markJobComplete(jobID, jobStatus.Status, jobStatus.ResultID, jobStatus.Error) + } + + c.JSON(http.StatusOK, jobStatus) +} diff --git a/backend/handlers/coverage_branches.go b/backend/handlers/coverage_branches.go new file mode 100644 index 0000000..a213dfb --- /dev/null +++ b/backend/handlers/coverage_branches.go @@ -0,0 +1,293 @@ +package handlers + +import ( + "context" + "net/http" + "time" + + "github.com/gin-gonic/gin" + "github.com/yourusername/backend/config" + "github.com/yourusername/backend/models" + "go.mongodb.org/mongo-driver/bson" + "go.mongodb.org/mongo-driver/bson/primitive" + "go.mongodb.org/mongo-driver/mongo/options" +) + +type MultiBranchScanRequest struct { + RepoURL string `json:"repo_url" binding:"required"` + Branches []string `json:"branches" binding:"required"` +} + +type BranchScanStatus struct { + Branch string `json:"branch"` + Status string `json:"status"` + Coverage float64 `json:"coverage,omitempty"` + Error string `json:"error,omitempty"` +} + +type MultiBranchScanResponse 
struct { + RepoURL string `json:"repo_url"` + TotalScanned int `json:"total_scanned"` + Successful int `json:"successful"` + Failed int `json:"failed"` + Branches []BranchScanStatus `json:"branches"` +} + +func ScanMultipleBranches(c *gin.Context) { + var req MultiBranchScanRequest + if err := c.ShouldBindJSON(&req); err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": "Invalid request"}) + return + } + + if len(req.Branches) == 0 { + c.JSON(http.StatusBadRequest, gin.H{"error": "At least one branch must be specified"}) + return + } + + response := MultiBranchScanResponse{ + RepoURL: req.RepoURL, + TotalScanned: len(req.Branches), + Successful: 0, + Failed: 0, + Branches: make([]BranchScanStatus, 0, len(req.Branches)), + } + + for _, branch := range req.Branches { + branchStatus := BranchScanStatus{Branch: branch} + resp, err := scanCoverage(CoverageRequest{RepoURL: req.RepoURL, Branch: branch}, false) + if err != nil { + branchStatus.Status = "failed" + branchStatus.Error = err.Error() + response.Failed++ + } else { + branchStatus.Status = "success" + branchStatus.Coverage = resp.TotalCoverage + response.Successful++ + } + response.Branches = append(response.Branches, branchStatus) + } + + c.JSON(http.StatusOK, response) +} + +func GetBranchCoverage(c *gin.Context) { + repoURL := c.Query("repo_url") + if repoURL == "" { + c.JSON(http.StatusBadRequest, gin.H{"error": "Repository URL is required"}) + return + } + + db, err := config.ConnectDB() + if err != nil { + c.JSON(http.StatusInternalServerError, gin.H{"error": "Database connection failed"}) + return + } + + collection := db.Collection("coverage_history") + ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second) + defer cancel() + + pipeline := []bson.M{ + { + "$match": bson.M{ + "repository": repoURL, + }, + }, + { + "$sort": bson.M{"timestamp": -1}, + }, + { + "$group": bson.M{ + "_id": "$branch", + "latest_coverage": bson.M{"$first": "$$ROOT"}, + }, + }, + { + "$replaceRoot": 
bson.M{"newRoot": "$latest_coverage"}, + }, + { + "$project": bson.M{ + "_id": 1, + "branch": 1, + "total_coverage": 1, + "timestamp": 1, + "commit_hash": 1, + }, + }, + } + + cursor, err := collection.Aggregate(ctx, pipeline) + if err != nil { + c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to query database"}) + return + } + defer cursor.Close(ctx) + + type BranchCoverage struct { + ID primitive.ObjectID `json:"id" bson:"_id"` + Branch string `json:"branch" bson:"branch"` + TotalCoverage float64 `json:"total_coverage" bson:"total_coverage"` + Timestamp time.Time `json:"timestamp" bson:"timestamp"` + CommitHash string `json:"commit_hash,omitempty" bson:"commit_hash"` + } + + var results []BranchCoverage + if err := cursor.All(ctx, &results); err != nil { + c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to decode results"}) + return + } + + if results == nil { + results = []BranchCoverage{} + } + + c.JSON(http.StatusOK, results) +} + +func CompareBranchCoverage(c *gin.Context) { + repoURL := c.Query("repo_url") + if repoURL == "" { + c.JSON(http.StatusBadRequest, gin.H{"error": "Repository URL is required"}) + return + } + + branch1 := c.Query("branch1") + branch2 := c.Query("branch2") + + if branch1 == "" || branch2 == "" { + c.JSON(http.StatusBadRequest, gin.H{"error": "Two branches are required for comparison"}) + return + } + + db, err := config.ConnectDB() + if err != nil { + c.JSON(http.StatusInternalServerError, gin.H{"error": "Database connection failed"}) + return + } + + collection := db.Collection("coverage_history") + ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second) + defer cancel() + + getBranchData := func(branch string) (*models.CoverageHistory, error) { + opts := options.FindOne().SetSort(bson.M{"timestamp": -1}) + var result models.CoverageHistory + err := collection.FindOne(ctx, bson.M{ + "repository": repoURL, + "branch": branch, + }, opts).Decode(&result) + + if err != nil { + return nil, 
err + } + + return &result, nil + } + + branch1Data, err1 := getBranchData(branch1) + branch2Data, err2 := getBranchData(branch2) + + if err1 != nil || err2 != nil { + c.JSON(http.StatusNotFound, gin.H{"error": "Coverage data not found for one or both branches"}) + return + } + + type FileDiff struct { + File string `json:"file"` + Branch1 float64 `json:"branch1"` + Branch2 float64 `json:"branch2"` + Diff float64 `json:"diff"` + DiffLabel string `json:"diff_label"` + } + + branch1Files := make(map[string]float64) + branch2Files := make(map[string]float64) + + for _, file := range branch1Data.Files { + branch1Files[file.File] = file.Coverage + } + + for _, file := range branch2Data.Files { + branch2Files[file.File] = file.Coverage + } + + allFiles := make(map[string]bool) + for file := range branch1Files { + allFiles[file] = true + } + for file := range branch2Files { + allFiles[file] = true + } + + var fileDiffs []FileDiff + for file := range allFiles { + cov1, ok1 := branch1Files[file] + cov2, ok2 := branch2Files[file] + + diff := 0.0 + diffLabel := "same" + + if !ok1 { + diff = cov2 + diffLabel = "new" + cov1 = 0.0 + } else if !ok2 { + diff = -cov1 + diffLabel = "removed" + cov2 = 0.0 + } else { + diff = cov2 - cov1 + if diff > 0 { + diffLabel = "better" + } else if diff < 0 { + diffLabel = "worse" + } + } + + fileDiffs = append(fileDiffs, FileDiff{ + File: file, + Branch1: cov1, + Branch2: cov2, + Diff: diff, + DiffLabel: diffLabel, + }) + } + + response := struct { + Repository string `json:"repository"` + Branch1 string `json:"branch1"` + Branch2 string `json:"branch2"` + Coverage1 float64 `json:"coverage1"` + Coverage2 float64 `json:"coverage2"` + CoverageDiff float64 `json:"coverage_diff"` + DiffLabel string `json:"diff_label"` + FileDiffs []FileDiff `json:"file_diffs"` + Branch1Date string `json:"branch1_date"` + Branch2Date string `json:"branch2_date"` + Branch1Commit string `json:"branch1_commit,omitempty"` + Branch2Commit string 
`json:"branch2_commit,omitempty"` + }{ + Repository: repoURL, + Branch1: branch1, + Branch2: branch2, + Coverage1: branch1Data.TotalCoverage, + Coverage2: branch2Data.TotalCoverage, + CoverageDiff: branch2Data.TotalCoverage - branch1Data.TotalCoverage, + FileDiffs: fileDiffs, + Branch1Date: branch1Data.Timestamp.Format(time.RFC3339), + Branch2Date: branch2Data.Timestamp.Format(time.RFC3339), + Branch1Commit: branch1Data.CommitHash, + Branch2Commit: branch2Data.CommitHash, + } + + if response.CoverageDiff > 0 { + response.DiffLabel = "better" + } else if response.CoverageDiff < 0 { + response.DiffLabel = "worse" + } else { + response.DiffLabel = "same" + } + + c.JSON(http.StatusOK, response) +} diff --git a/backend/handlers/coverage_history.go b/backend/handlers/coverage_history.go new file mode 100644 index 0000000..e0dea49 --- /dev/null +++ b/backend/handlers/coverage_history.go @@ -0,0 +1,200 @@ +package handlers + +import ( + "context" + "net/http" + "strconv" + "strings" + "time" + + "github.com/gin-gonic/gin" + "github.com/yourusername/backend/config" + "github.com/yourusername/backend/models" + "go.mongodb.org/mongo-driver/bson" + "go.mongodb.org/mongo-driver/bson/primitive" + "go.mongodb.org/mongo-driver/mongo/options" +) + +func GetCoverageHistory(c *gin.Context) { + repoURL := c.Query("repo_url") + if repoURL == "" { + c.JSON(http.StatusBadRequest, gin.H{"error": "Repository URL is required"}) + return + } + + searchQuery := c.Query("search") + + db, err := config.ConnectDB() + if err != nil { + c.JSON(http.StatusInternalServerError, gin.H{"error": "Database connection failed"}) + return + } + + collection := db.Collection("coverage_history") + ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second) + defer cancel() + + filter := bson.M{ + "repository": repoURL, + } + + if branch := c.Query("branch"); branch != "" { + filter["branch"] = branch + } + + if coverageThreshold := c.Query("coverage_threshold"); coverageThreshold != "" { + if 
threshold, err := strconv.ParseFloat(coverageThreshold, 64); err == nil { + filter["total_coverage"] = bson.M{"$gte": threshold} + } + } + + opts := options.Find().SetSort(bson.M{ + "timestamp": -1, + }).SetLimit(50) + + cursor, err := collection.Find(ctx, filter, opts) + if err != nil { + c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to query database"}) + return + } + defer cursor.Close(ctx) + + var results []models.CoverageHistory + if err := cursor.All(ctx, &results); err != nil { + c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to decode results"}) + return + } + + if searchQuery != "" { + searchLower := strings.ToLower(searchQuery) + var filtered []models.CoverageHistory + + for _, item := range results { + if strings.Contains(strings.ToLower(item.Branch), searchLower) { + filtered = append(filtered, item) + } else if item.CommitHash != "" && strings.Contains(strings.ToLower(item.CommitHash), searchLower) { + filtered = append(filtered, item) + } + } + + results = filtered + } + + if results == nil { + results = []models.CoverageHistory{} + } + + c.JSON(http.StatusOK, results) +} + +func GetCoverageById(c *gin.Context) { + id := c.Param("id") + if id == "" { + c.JSON(http.StatusBadRequest, gin.H{"error": "ID is required"}) + return + } + + objID, err := primitive.ObjectIDFromHex(id) + if err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": "Invalid ID format"}) + return + } + + db, err := config.ConnectDB() + if err != nil { + c.JSON(http.StatusInternalServerError, gin.H{"error": "Database connection failed"}) + return + } + + collection := db.Collection("coverage_history") + ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second) + defer cancel() + + var result models.CoverageHistory + err = collection.FindOne(ctx, bson.M{ + "_id": objID, + }).Decode(&result) + + if err != nil { + c.JSON(http.StatusNotFound, gin.H{"error": "Coverage record not found"}) + return + } + + c.JSON(http.StatusOK, result) +} + 
+func GetCoverageTrends(c *gin.Context) { + repoURL := c.Query("repo_url") + if repoURL == "" { + c.JSON(http.StatusBadRequest, gin.H{"error": "Repository URL is required"}) + return + } + + days := 30 + if daysStr := c.Query("days"); daysStr != "" { + if d, err := strconv.Atoi(daysStr); err == nil && d > 0 { + days = d + } + } + + db, err := config.ConnectDB() + if err != nil { + c.JSON(http.StatusInternalServerError, gin.H{"error": "Database connection failed"}) + return + } + + collection := db.Collection("coverage_history") + ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second) + defer cancel() + + startDate := time.Now().AddDate(0, 0, -days) + + filter := bson.M{ + "repository": repoURL, + "timestamp": bson.M{ + "$gte": startDate, + }, + } + + if branch := c.Query("branch"); branch != "" { + filter["branch"] = branch + } + + opts := options.Find().SetSort(bson.M{ + "timestamp": 1, + }) + + cursor, err := collection.Find(ctx, filter, opts) + if err != nil { + c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to query database"}) + return + } + defer cursor.Close(ctx) + + type TrendPoint struct { + Date string `json:"date"` + Coverage float64 `json:"coverage"` + CommitHash string `json:"commit_hash,omitempty"` + } + + var trends []TrendPoint + var results []models.CoverageHistory + if err := cursor.All(ctx, &results); err != nil { + c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to decode results"}) + return + } + + for _, record := range results { + trends = append(trends, TrendPoint{ + Date: record.Timestamp.Format("2006-01-02"), + Coverage: record.TotalCoverage, + CommitHash: record.CommitHash, + }) + } + + if trends == nil { + trends = []TrendPoint{} + } + + c.JSON(http.StatusOK, trends) +} diff --git a/backend/handlers/repository.go b/backend/handlers/repository.go new file mode 100644 index 0000000..6be258e --- /dev/null +++ b/backend/handlers/repository.go @@ -0,0 +1,702 @@ +package handlers + +import ( + 
"context" + "encoding/json" + "fmt" + "io" + "net/http" + "sort" + "strconv" + "strings" + "sync" + "time" + + "log" + + "github.com/gin-gonic/gin" + "github.com/yourusername/backend/config" + "github.com/yourusername/backend/models" + "go.mongodb.org/mongo-driver/bson" + "go.mongodb.org/mongo-driver/bson/primitive" + "go.mongodb.org/mongo-driver/mongo" + "go.mongodb.org/mongo-driver/mongo/options" +) + +type RepoWithLang struct { + models.Repository + Languages map[string]float64 `json:"languages"` +} + +func GetUserRepositories(c *gin.Context) { + userIDStr, exists := c.Get("userID") + if !exists { + c.JSON(http.StatusUnauthorized, gin.H{"error": "Unauthorized"}) + return + } + userID, err := primitive.ObjectIDFromHex(userIDStr.(string)) + if err != nil { + c.JSON(http.StatusInternalServerError, gin.H{"error": "Invalid user ID format"}) + return + } + + skip := 0 + limit := 10 + skipParam := c.Query("skip") + limitParam := c.Query("limit") + searchParam := c.Query("search") + refreshParam := c.Query("refresh") + refreshData := refreshParam == "true" + + if skipParam != "" { + if skipInt, err := strconv.Atoi(skipParam); err == nil { + skip = skipInt + } + } + + if limitParam != "" { + if limitInt, err := strconv.Atoi(limitParam); err == nil && limitInt > 0 && limitInt <= 50 { + limit = limitInt + } + } + + if !refreshData { + dbRepos, totalCount, dbErr := getRepositoriesFromDB(userID, skip, limit, searchParam) + if dbErr != nil { + } else if totalCount > 0 { + c.JSON(http.StatusOK, gin.H{ + "repositories": dbRepos, + "totalCount": totalCount, + "skip": skip, + "limit": limit, + "source": "database", + }) + return + } + fetchRepositoriesFromGitHub(c, userID, skip, limit, searchParam, false) + return + } + + fetchRepositoriesFromGitHub(c, userID, skip, limit, searchParam, true) +} + +func getRepositoriesFromDB(userID primitive.ObjectID, skip, limit int, search string) ([]RepoWithLang, int, error) { + collection := config.GetCollection("repositories") + filter := 
bson.M{"user_id": userID} + if search != "" { + filter["$or"] = []bson.M{ + {"name": bson.M{"$regex": search, "$options": "i"}}, + {"description": bson.M{"$regex": search, "$options": "i"}}, + } + } + totalCount, err := collection.CountDocuments(context.Background(), filter) + if err != nil { + return nil, 0, fmt.Errorf("error counting repositories: %w", err) + } + findOptions := options.Find(). + SetSkip(int64(skip)). + SetLimit(int64(limit)). + SetSort(bson.D{{Key: "name", Value: 1}}) + cursor, err := collection.Find(context.Background(), filter, findOptions) + if err != nil { + return nil, 0, fmt.Errorf("error finding repositories: %w", err) + } + defer cursor.Close(context.Background()) + var repositories []models.Repository + if err = cursor.All(context.Background(), &repositories); err != nil { + return nil, 0, fmt.Errorf("error decoding repositories: %w", err) + } + reposWithLang := make([]RepoWithLang, len(repositories)) + for i, repo := range repositories { + reposWithLang[i] = RepoWithLang{ + Repository: repo, + Languages: repo.Languages, + } + } + return reposWithLang, int(totalCount), nil +} + +func fetchRepositoriesFromGitHub(c *gin.Context, userID primitive.ObjectID, skip, limit int, searchParam string, forceRefresh bool) { + log.Printf("Fetching repositories from GitHub - User: %s, Skip: %d, Limit: %d, Search: %s, ForceRefresh: %v", userID.Hex(), skip, limit, searchParam, forceRefresh) + collection := config.GetCollection("users") + var user models.User + if err := collection.FindOne(context.Background(), bson.M{"_id": userID}).Decode(&user); err != nil { + log.Printf("Error: Failed to get user information for ID %s: %v", userID.Hex(), err) + c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to get user information"}) + return + } + if user.AccessToken == "" { + log.Printf("Error: GitHub token not available for user %s", userID.Hex()) + c.JSON(http.StatusUnauthorized, gin.H{"error": "GitHub token not available. 
Please reconnect your GitHub account."}) + return + } + var allRepos []models.GitHubRepository + page := 1 + perPage := 100 + client := &http.Client{ + Timeout: 30 * time.Second, + } + for { + url := fmt.Sprintf("https://api.github.com/user/repos?per_page=%d&page=%d&type=all", perPage, page) + log.Printf("Making GitHub API request to %s", url) + req, _ := http.NewRequest("GET", url, nil) + req.Header.Set("Authorization", "token "+user.AccessToken) + req.Header.Set("Accept", "application/vnd.github.v3+json") + req.Header.Set("User-Agent", "YourAppName") + if forceRefresh { + req.Header.Set("Cache-Control", "no-cache") + req.URL.RawQuery = req.URL.RawQuery + "&_=" + strconv.FormatInt(time.Now().Unix(), 10) + log.Printf("Adding cache-busting parameter for force refresh: %s", req.URL.String()) + } + resp, err := client.Do(req) + if err != nil { + log.Printf("Error: Failed to fetch repositories from GitHub: %v", err) + c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to fetch repositories from GitHub: " + err.Error()}) + return + } + if resp.StatusCode != http.StatusOK { + body, _ := io.ReadAll(resp.Body) + resp.Body.Close() + log.Printf("GitHub API error - Status: %d, Body: %s", resp.StatusCode, string(body)) + switch resp.StatusCode { + case http.StatusForbidden: + c.JSON(http.StatusForbidden, gin.H{"error": "GitHub API rate limit exceeded or permissions issue", "code": resp.StatusCode, "details": string(body)}) + case http.StatusUnauthorized: + c.JSON(http.StatusUnauthorized, gin.H{"error": "GitHub authentication failed. 
Please reconnect your GitHub account.", "code": resp.StatusCode}) + default: + c.JSON(http.StatusInternalServerError, gin.H{"error": "GitHub API returned an error", "code": resp.StatusCode, "details": string(body)}) + } + return + } + body, _ := io.ReadAll(resp.Body) + resp.Body.Close() + var pageRepos []models.GitHubRepository + if err := json.Unmarshal(body, &pageRepos); err != nil { + log.Printf("Error: Failed to parse GitHub response: %v\nResponse body: %s", err, string(body)) + c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to parse GitHub response"}) + return + } + log.Printf("Successfully fetched %d repositories from GitHub (page %d) for user %s", len(pageRepos), page, userID.Hex()) + allRepos = append(allRepos, pageRepos...) + if len(pageRepos) < perPage { + break + } + page++ + time.Sleep(100 * time.Millisecond) + } + log.Printf("Total repositories fetched from GitHub: %d", len(allRepos)) + if searchParam != "" { + searchLower := strings.ToLower(searchParam) + var filtered []models.GitHubRepository + for _, repo := range allRepos { + if strings.Contains(strings.ToLower(repo.Name), searchLower) || + (strings.TrimSpace(repo.Description) != "" && strings.Contains(strings.ToLower(repo.Description), searchLower)) { + filtered = append(filtered, repo) + } + } + log.Printf("Applied search filter '%s': filtered from %d to %d repositories", searchParam, len(allRepos), len(filtered)) + allRepos = filtered + } + if c == nil { + processAndSaveRepositoriesBackground(context.Background(), userID, allRepos, skip, limit, user.AccessToken, forceRefresh) + } else { + processAndSaveRepositories(c, userID, allRepos, skip, limit, user.AccessToken, forceRefresh) + } +} + +func processAndSaveRepositories(c *gin.Context, userID primitive.ObjectID, githubRepos []models.GitHubRepository, skip, limit int, accessToken string, forceRefresh bool) { + log.Printf("Processing %d repositories for user %s (forceRefresh: %v)", len(githubRepos), userID.Hex(), forceRefresh) + 
const maxConcurrent = 5 + sem := make(chan struct{}, maxConcurrent) + var ( + reposWithLang []RepoWithLang + wg sync.WaitGroup + mu sync.Mutex + now = time.Now() + errorCount = 0 + successCount = 0 + ) + for idx, repo := range githubRepos { + wg.Add(1) + go func(idx int, repo models.GitHubRepository) { + defer wg.Done() + sem <- struct{}{} + defer func() { <-sem }() + repoCollection := config.GetCollection("repositories") + var existingRepo models.Repository + err := repoCollection.FindOne( + context.Background(), + bson.M{ + "github_id": repo.ID, + "user_id": userID, + }, + ).Decode(&existingRepo) + var languages map[string]float64 + repoLog := fmt.Sprintf("[Repo %d/%d] %s/%s (ID: %d)", idx+1, len(githubRepos), repo.Owner.Login, repo.Name, repo.ID) + if err == nil && len(existingRepo.Languages) > 0 && !isLanguageDataStale(existingRepo.LastFetched) && !forceRefresh { + log.Printf("%s: Using cached language data (last fetched: %s)", repoLog, existingRepo.LastFetched.Format(time.RFC3339)) + languages = existingRepo.Languages + } else { + langURL := fmt.Sprintf("https://api.github.com/repos/%s/%s/languages", repo.Owner.Login, repo.Name) + log.Printf("%s: Fetching languages from %s", repoLog, langURL) + langReq, _ := http.NewRequest("GET", langURL, nil) + langReq.Header.Set("Authorization", "token "+accessToken) + langReq.Header.Set("Accept", "application/vnd.github.v3+json") + langReq.Header.Set("User-Agent", "YourAppName") + langClient := &http.Client{ + Timeout: 10 * time.Second, + } + langResp, err := langClient.Do(langReq) + if err != nil || langResp.StatusCode != http.StatusOK { + if err != nil { + log.Printf("%s: Error fetching languages: %v", repoLog, err) + } else { + langBody, _ := io.ReadAll(langResp.Body) + log.Printf("%s: Language API error - Status: %d, Body: %s", repoLog, langResp.StatusCode, string(langBody)) + langResp.Body.Close() + } + languages = make(map[string]float64) + if err == nil && len(existingRepo.Languages) > 0 { + log.Printf("%s: Using 
existing language data due to API error", repoLog) + languages = existingRepo.Languages + } + } else { + defer langResp.Body.Close() + var langBytes map[string]int + if err := json.NewDecoder(langResp.Body).Decode(&langBytes); err != nil { + log.Printf("%s: Error decoding language data: %v", repoLog, err) + languages = make(map[string]float64) + if len(existingRepo.Languages) > 0 { + languages = existingRepo.Languages + } + } else { + total := 0 + for _, b := range langBytes { + total += b + } + languages = make(map[string]float64) + if total > 0 { + for lang, b := range langBytes { + languages[lang] = float64(b) * 100 / float64(total) + } + } + log.Printf("%s: Successfully fetched language data: %v", repoLog, languages) + } + } + } + modelRepo := models.Repository{ + Name: repo.Name, + FullName: repo.FullName, + Description: repo.Description, + URL: repo.URL, + HTMLURL: repo.HTMLURL, + Owner: repo.Owner.Login, + GitHubID: repo.ID, + Private: repo.Private, + Status: "active", + UserID: userID, + Languages: languages, + LastFetched: now, + CreatedAt: existingRepo.CreatedAt, + UpdatedAt: now, + } + if err == nil { + modelRepo.ID = existingRepo.ID + if existingRepo.CreatedAt.IsZero() { + modelRepo.CreatedAt = now + } + log.Printf("%s: Updating existing repository record (ID: %s)", repoLog, existingRepo.ID.Hex()) + } else { + modelRepo.ID = primitive.NewObjectID() + modelRepo.CreatedAt = now + log.Printf("%s: Creating new repository record", repoLog) + } + if err := saveRepositoryToDB(userID, modelRepo, languages); err != nil { + mu.Lock() + errorCount++ + mu.Unlock() + log.Printf("%s: Failed to save repository: %v", repoLog, err) + } else { + mu.Lock() + successCount++ + mu.Unlock() + log.Printf("%s: Successfully saved repository to database", repoLog) + } + mu.Lock() + reposWithLang = append(reposWithLang, RepoWithLang{ + Repository: modelRepo, + Languages: languages, + }) + mu.Unlock() + }(idx, repo) + } + wg.Wait() + log.Printf("Finished processing repositories for 
user %s - Success: %d, Error: %d", userID.Hex(), successCount, errorCount)
	// Stable, case-insensitive alphabetical ordering of the merged results.
	sort.Slice(reposWithLang, func(i, j int) bool {
		return strings.ToLower(reposWithLang[i].Name) < strings.ToLower(reposWithLang[j].Name)
	})
	// Pagination is applied in memory, after every repository has already been
	// fetched and processed; skip/limit only shape the response payload.
	totalCount := len(reposWithLang)
	end := skip + limit
	if end > totalCount {
		end = totalCount
	}
	var paginatedRepos []RepoWithLang
	if skip < totalCount {
		paginatedRepos = reposWithLang[skip:end]
	} else {
		// skip past the end of the data set: return an empty page, not an error.
		paginatedRepos = []RepoWithLang{}
	}
	c.JSON(http.StatusOK, gin.H{
		"repositories": paginatedRepos,
		"totalCount":   totalCount,
		"skip":         skip,
		"limit":        limit,
		"source":       "github", // data came from the GitHub API, not the local cache
	})
}

// isLanguageDataStale reports whether cached per-repository language data
// should be refreshed: either it was never fetched or it is older than 7 days.
func isLanguageDataStale(lastFetched time.Time) bool {
	return lastFetched.IsZero() || time.Since(lastFetched) > 7*24*time.Hour
}

// saveRepositoryToDB upserts a single repository document, keyed by
// (github_id, user_id). last_fetched and updated_at are always refreshed;
// created_at is preserved across updates and stamped on first insert.
func saveRepositoryToDB(userID primitive.ObjectID, repo models.Repository, languages map[string]float64) error {
	ctx := context.Background()
	collection := config.GetCollection("repositories")
	filter := bson.M{"github_id": repo.GitHubID, "user_id": userID}
	update := bson.M{
		"$set": bson.M{
			"name":         repo.Name,
			"full_name":    repo.FullName,
			"description":  repo.Description,
			"url":          repo.URL,
			"html_url":     repo.HTMLURL,
			"owner":        repo.Owner,
			"private":      repo.Private,
			"status":       repo.Status,
			"user_id":      userID,
			"languages":    languages,
			"last_fetched": time.Now(),
			"updated_at":   time.Now(),
		},
	}
	// created_at goes into either $set or $setOnInsert — never both, which
	// would be a MongoDB update-path conflict.
	if repo.CreatedAt.IsZero() {
		update["$set"].(bson.M)["created_at"] = time.Now()
	} else {
		update["$setOnInsert"] = bson.M{"created_at": repo.CreatedAt}
	}
	opts := options.Update().SetUpsert(true)
	result, err := collection.UpdateOne(ctx, filter, update, opts)
	if err != nil {
		log.Printf("Error saving repository %s (ID: %d) to DB: %v", repo.Name, repo.GitHubID, err)
		return err
	}
	if result.MatchedCount > 0 {
		log.Printf("Updated existing repository in DB: %s (ID: %d)", repo.Name, repo.GitHubID)
	} else if result.UpsertedCount > 0 {
		log.Printf("Inserted new repository in DB: 
%s (ID: %d, MongoDB ID: %v)",
			repo.Name, repo.GitHubID, result.UpsertedID)
	} else {
		// Matched nothing and inserted nothing — unexpected for an upsert.
		log.Printf("Warning: Repository save operation reported no effect: %s (ID: %d)", repo.Name, repo.GitHubID)
	}
	return nil
}

// GetRepositoryByID returns a single repository document by its MongoDB _id.
// NOTE(review): the query filters on _id only — there is no user_id scoping,
// so any authenticated caller can read any repository document; confirm this
// is intended.
func GetRepositoryByID(c *gin.Context) {
	repoID := c.Param("id")
	objID, err := primitive.ObjectIDFromHex(repoID)
	if err != nil {
		c.JSON(http.StatusBadRequest, gin.H{"error": "Invalid repository ID"})
		return
	}
	collection := config.GetCollection("repositories")
	var repo models.Repository
	err = collection.FindOne(context.Background(), bson.M{"_id": objID}).Decode(&repo)
	if err != nil {
		if err == mongo.ErrNoDocuments {
			c.JSON(http.StatusNotFound, gin.H{"error": "Repository not found"})
		} else {
			c.JSON(http.StatusInternalServerError, gin.H{"error": "Database error: " + err.Error()})
		}
		return
	}
	c.JSON(http.StatusOK, gin.H{"repository": repo})
}

// RefreshUserRepositories force-refreshes the caller's repositories from
// GitHub. skip defaults to 0; limit defaults to 10 and is clamped to (0, 50].
// Invalid numeric query parameters are silently ignored and the defaults kept.
func RefreshUserRepositories(c *gin.Context) {
	userIDStr, exists := c.Get("userID")
	if !exists {
		c.JSON(http.StatusUnauthorized, gin.H{"error": "Unauthorized"})
		return
	}
	userID, err := primitive.ObjectIDFromHex(userIDStr.(string))
	if err != nil {
		c.JSON(http.StatusInternalServerError, gin.H{"error": "Invalid user ID format"})
		return
	}
	skip := 0
	limit := 10
	skipParam := c.Query("skip")
	limitParam := c.Query("limit")
	searchParam := c.Query("search")
	if skipParam != "" {
		if skipInt, err := strconv.Atoi(skipParam); err == nil {
			skip = skipInt
		}
	}
	if limitParam != "" {
		if limitInt, err := strconv.Atoi(limitParam); err == nil && limitInt > 0 && limitInt <= 50 {
			limit = limitInt
		}
	}
	// forceRefresh=true bypasses the cached per-repository language data.
	fetchRepositoriesFromGitHub(c, userID, skip, limit, searchParam, true)
}

// RefreshAllRepositories answers 200 immediately and rebuilds the user's
// repository collection in a background goroutine.
// NOTE(review): the background job deletes all of the user's repositories
// before re-fetching from GitHub — if that fetch then fails, the cached data
// is lost; consider delete-after-fetch or an upsert-only refresh.
func RefreshAllRepositories(c *gin.Context) {
	userIDStr, exists := c.Get("userID")
	if !exists {
		c.JSON(http.StatusUnauthorized, gin.H{"error": "Unauthorized"})
		return
	}
	userID, err := primitive.ObjectIDFromHex(userIDStr.(string))
	if err != nil {
c.JSON(http.StatusInternalServerError, gin.H{"error": "Invalid user ID format"}) + return + } + skip := 0 + limit := 100 + searchParam := c.Query("search") + c.JSON(http.StatusOK, gin.H{ + "message": "Repository refresh started. This may take a moment for large collections.", + "status": "processing", + }) + go func() { + ctx, cancel := context.WithTimeout(context.Background(), 5*time.Minute) + defer cancel() + collection := config.GetCollection("repositories") + _, err := collection.DeleteMany(ctx, bson.M{"user_id": userID}) + if err != nil { + log.Printf("Error clearing existing repositories for user %s: %v", userID.Hex(), err) + } else { + log.Printf("Cleared existing repositories for user %s before refresh", userID.Hex()) + } + fetchRepositoriesFromGitHubBackground(ctx, userID, skip, limit, searchParam, true) + log.Printf("Completed repository refresh for user %s", userID.Hex()) + }() +} + +func fetchRepositoriesFromGitHubBackground(ctx context.Context, userID primitive.ObjectID, skip, limit int, searchParam string, forceRefresh bool) { + log.Printf("Fetching repositories from GitHub (background) - User: %s, Skip: %d, Limit: %d, Search: %s, ForceRefresh: %v", + userID.Hex(), skip, limit, searchParam, forceRefresh) + collection := config.GetCollection("users") + var user models.User + if err := collection.FindOne(ctx, bson.M{"_id": userID}).Decode(&user); err != nil { + log.Printf("Error: Failed to get user information for ID %s: %v", userID.Hex(), err) + return + } + if user.AccessToken == "" { + log.Printf("Error: GitHub token not available for user %s", userID.Hex()) + return + } + var allRepos []models.GitHubRepository + page := 1 + perPage := 100 + client := &http.Client{ + Timeout: 30 * time.Second, + } + for { + url := fmt.Sprintf("https://api.github.com/user/repos?per_page=%d&page=%d&type=all", perPage, page) + log.Printf("Making GitHub API request to %s", url) + req, err := http.NewRequestWithContext(ctx, "GET", url, nil) + if err != nil { + 
log.Printf("Error creating request: %v", err)
			return
		}
		req.Header.Set("Authorization", "token "+user.AccessToken)
		req.Header.Set("Accept", "application/vnd.github.v3+json")
		req.Header.Set("User-Agent", "YourAppName")
		if forceRefresh {
			// Defeat intermediate caches: no-cache header plus a timestamp
			// cache-busting query parameter appended to the URL.
			req.Header.Set("Cache-Control", "no-cache")
			req.URL.RawQuery = req.URL.RawQuery + "&_=" + strconv.FormatInt(time.Now().Unix(), 10)
		}
		resp, err := client.Do(req)
		if err != nil {
			// Background job: log and abort silently — there is no HTTP
			// response to send at this point.
			log.Printf("Error: Failed to fetch repositories from GitHub: %v", err)
			return
		}
		if resp.StatusCode != http.StatusOK {
			body, _ := io.ReadAll(resp.Body)
			resp.Body.Close()
			log.Printf("GitHub API error - Status: %d, Body: %s", resp.StatusCode, string(body))
			return
		}
		body, _ := io.ReadAll(resp.Body)
		resp.Body.Close()
		var pageRepos []models.GitHubRepository
		if err := json.Unmarshal(body, &pageRepos); err != nil {
			log.Printf("Error: Failed to parse GitHub response: %v", err)
			return
		}
		log.Printf("Successfully fetched %d repositories from GitHub (page %d) for user %s", len(pageRepos), page, userID.Hex())
		allRepos = append(allRepos, pageRepos...)
+ if len(pageRepos) < perPage { + break + } + page++ + time.Sleep(100 * time.Millisecond) + } + log.Printf("Total repositories fetched from GitHub: %d", len(allRepos)) + if searchParam != "" { + searchLower := strings.ToLower(searchParam) + var filtered []models.GitHubRepository + for _, repo := range allRepos { + if strings.Contains(strings.ToLower(repo.Name), searchLower) || + (strings.TrimSpace(repo.Description) != "" && strings.Contains(strings.ToLower(repo.Description), searchLower)) { + filtered = append(filtered, repo) + } + } + log.Printf("Applied search filter '%s': filtered from %d to %d repositories", searchParam, len(allRepos), len(filtered)) + allRepos = filtered + } + processAndSaveRepositoriesBackground(ctx, userID, allRepos, skip, limit, user.AccessToken, forceRefresh) +} + +func processAndSaveRepositoriesBackground(ctx context.Context, userID primitive.ObjectID, githubRepos []models.GitHubRepository, + skip, limit int, accessToken string, forceRefresh bool) { + log.Printf("Processing %d repositories for user %s (forceRefresh: %v)", len(githubRepos), userID.Hex(), forceRefresh) + const maxConcurrent = 5 + sem := make(chan struct{}, maxConcurrent) + var ( + reposWithLang []RepoWithLang + wg sync.WaitGroup + mu sync.Mutex + now = time.Now() + errorCount = 0 + successCount = 0 + ) + for idx, repo := range githubRepos { + wg.Add(1) + go func(idx int, repo models.GitHubRepository) { + defer wg.Done() + sem <- struct{}{} + defer func() { <-sem }() + repoCollection := config.GetCollection("repositories") + var existingRepo models.Repository + err := repoCollection.FindOne( + ctx, + bson.M{ + "github_id": repo.ID, + "user_id": userID, + }, + ).Decode(&existingRepo) + var languages map[string]float64 + repoLog := fmt.Sprintf("[Repo %d/%d] %s/%s (ID: %d)", idx+1, len(githubRepos), repo.Owner.Login, repo.Name, repo.ID) + if err == nil && len(existingRepo.Languages) > 0 && !isLanguageDataStale(existingRepo.LastFetched) && !forceRefresh { + log.Printf("%s: Using 
cached language data (last fetched: %s)", repoLog, existingRepo.LastFetched.Format(time.RFC3339)) + languages = existingRepo.Languages + } else { + langURL := fmt.Sprintf("https://api.github.com/repos/%s/%s/languages", repo.Owner.Login, repo.Name) + log.Printf("%s: Fetching languages from %s", repoLog, langURL) + langReq, err := http.NewRequestWithContext(ctx, "GET", langURL, nil) + if err != nil { + log.Printf("%s: Error creating language request: %v", repoLog, err) + languages = make(map[string]float64) + if len(existingRepo.Languages) > 0 { + languages = existingRepo.Languages + } + return + } + langReq.Header.Set("Authorization", "token "+accessToken) + langReq.Header.Set("Accept", "application/vnd.github.v3+json") + langReq.Header.Set("User-Agent", "YourAppName") + langClient := &http.Client{ + Timeout: 10 * time.Second, + } + langResp, err := langClient.Do(langReq) + if err != nil || langResp == nil || langResp.StatusCode != http.StatusOK { + if err != nil { + log.Printf("%s: Error fetching languages: %v", repoLog, err) + } else if langResp == nil { + log.Printf("%s: Null response when fetching languages", repoLog) + } else { + langBody, _ := io.ReadAll(langResp.Body) + log.Printf("%s: Language API error - Status: %d, Body: %s", repoLog, langResp.StatusCode, string(langBody)) + langResp.Body.Close() + } + languages = make(map[string]float64) + if err == nil && len(existingRepo.Languages) > 0 { + log.Printf("%s: Using existing language data due to API error", repoLog) + languages = existingRepo.Languages + } + } else { + defer langResp.Body.Close() + var langBytes map[string]int + if err := json.NewDecoder(langResp.Body).Decode(&langBytes); err != nil { + log.Printf("%s: Error decoding language data: %v", repoLog, err) + languages = make(map[string]float64) + if len(existingRepo.Languages) > 0 { + languages = existingRepo.Languages + } + } else { + total := 0 + for _, b := range langBytes { + total += b + } + languages = make(map[string]float64) + if total > 0 
{ + for lang, b := range langBytes { + languages[lang] = float64(b) * 100 / float64(total) + } + } + log.Printf("%s: Successfully fetched language data: %v", repoLog, languages) + } + } + } + modelRepo := models.Repository{ + Name: repo.Name, + FullName: repo.FullName, + Description: repo.Description, + URL: repo.URL, + HTMLURL: repo.HTMLURL, + Owner: repo.Owner.Login, + GitHubID: repo.ID, + Private: repo.Private, + Status: "active", + UserID: userID, + Languages: languages, + LastFetched: now, + CreatedAt: existingRepo.CreatedAt, + UpdatedAt: now, + } + if err == nil { + modelRepo.ID = existingRepo.ID + if existingRepo.CreatedAt.IsZero() { + modelRepo.CreatedAt = now + } + log.Printf("%s: Updating existing repository record (ID: %s)", repoLog, existingRepo.ID.Hex()) + } else { + modelRepo.ID = primitive.NewObjectID() + modelRepo.CreatedAt = now + log.Printf("%s: Creating new repository record", repoLog) + } + if err := saveRepositoryToDB(userID, modelRepo, languages); err != nil { + mu.Lock() + errorCount++ + mu.Unlock() + log.Printf("%s: Failed to save repository: %v", repoLog, err) + } else { + mu.Lock() + successCount++ + mu.Unlock() + log.Printf("%s: Successfully saved repository to database", repoLog) + } + mu.Lock() + reposWithLang = append(reposWithLang, RepoWithLang{ + Repository: modelRepo, + Languages: languages, + }) + mu.Unlock() + }(idx, repo) + } + wg.Wait() + log.Printf("Finished processing repositories for user %s - Success: %d, Error: %d", userID.Hex(), successCount, errorCount) +} diff --git a/backend/handlers/user.go b/backend/handlers/user.go new file mode 100644 index 0000000..cfcdeab --- /dev/null +++ b/backend/handlers/user.go @@ -0,0 +1,36 @@ +package handlers + +import ( + "context" + "net/http" + "github.com/gin-gonic/gin" + "github.com/yourusername/backend/config" + "go.mongodb.org/mongo-driver/bson" + "go.mongodb.org/mongo-driver/bson/primitive" +) + + +func GetUserProfile(c *gin.Context) { + userIDStr, exists := c.Get("userID") + if 
!exists {
		c.JSON(http.StatusUnauthorized, gin.H{"error": "Unauthorized"})
		return
	}
	userID, err := primitive.ObjectIDFromHex(userIDStr.(string))
	if err != nil {
		c.JSON(http.StatusInternalServerError, gin.H{"error": "Invalid user ID format"})
		return
	}
	collection := config.GetCollection("users")
	// Decode into a generic map so sensitive fields can be stripped below
	// without defining a dedicated response struct.
	var user map[string]interface{}

	err = collection.FindOne(context.Background(), bson.M{"_id": userID}).Decode(&user)
	if err != nil {
		// NOTE(review): a missing user and a genuine DB failure are both
		// reported as 500 here; mongo.ErrNoDocuments arguably deserves 404.
		c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to get user profile"})
		return
	}
	// Never leak OAuth credentials to the client.
	delete(user, "access_token")
	delete(user, "refresh_token")

	c.JSON(http.StatusOK, gin.H{"user": user})
}
diff --git a/backend/main.go b/backend/main.go
new file mode 100644
index 0000000..1ad774f
--- /dev/null
+++ b/backend/main.go
@@ -0,0 +1,64 @@
package main

import (
	"log"
	"net/http"
	"os"
	"time"

	"github.com/gin-gonic/gin"
	"github.com/joho/godotenv"
	"github.com/yourusername/backend/config"
	"github.com/yourusername/backend/handlers"
	"github.com/yourusername/backend/middleware"
)

// main wires configuration, middleware, routes, and the HTTP server together.
func main() {
	// .env is optional; deployments may inject environment variables directly.
	if err := godotenv.Load(); err != nil {
		log.Println("No .env file found")
	}

	_, err := config.ConnectDB()
	if err != nil {
		log.Fatalf("Failed to connect to database: %v", err)
	}

	r := gin.Default()
	r.Use(middleware.CORSMiddleware())

	// Public endpoints (no auth middleware).
	r.POST("/auth/github/signup", handlers.GitHubSignUp)
	r.POST("/auth/github/signin", handlers.GitHubSignIn)
	r.POST("/coverage", handlers.RunCoverageScan)
	r.GET("/coverage/history", handlers.GetCoverageHistory)
	r.GET("/coverage/:id", handlers.GetCoverageById)
	r.GET("/coverage/trends", handlers.GetCoverageTrends)
	r.POST("/coverage/branches", handlers.ScanMultipleBranches)
	r.GET("/coverage/branches", handlers.GetBranchCoverage)
	r.GET("/coverage/compare", handlers.CompareBranchCoverage)
	r.GET("/coverage/status/:job_id", handlers.GetCoverageJobStatus)

	// JWT-protected endpoints under /api.
	protected := r.Group("/api")
	protected.Use(middleware.AuthMiddleware())
protected.GET("/profile", handlers.GetUserProfile)
	protected.GET("/repositories", handlers.GetUserRepositories)
	protected.GET("/repositories/refresh", handlers.RefreshUserRepositories)
	protected.GET("/repositories/force-refresh", handlers.RefreshAllRepositories)
	protected.GET("/github-contributions", handlers.GetGitHubContributions)

	handlers.CleanupOldJobs()

	// NOTE(review): there is no fallback when PORT is unset — Addr becomes
	// just ":"; .env.example suggests 8080 was the intended default.
	port := os.Getenv("PORT")

	// Generous timeouts — coverage scans can take a long time.
	server := &http.Server{
		Addr:         ":" + port,
		Handler:      r,
		ReadTimeout:  200 * time.Second,
		WriteTimeout: 200 * time.Second,
	}

	log.Printf("Server running on port %s", port)
	if err := server.ListenAndServe(); err != nil {
		log.Fatalf("Failed to start server: %v", err)
	}

}
diff --git a/backend/middleware/auth.go b/backend/middleware/auth.go
new file mode 100644
index 0000000..b4640b6
--- /dev/null
+++ b/backend/middleware/auth.go
@@ -0,0 +1,40 @@
package middleware

import (
	"net/http"
	"strings"

	"github.com/gin-gonic/gin"
	"github.com/yourusername/backend/utils"
)

// AuthMiddleware validates the Bearer JWT in the Authorization header and,
// on success, stores the token's user ID in the gin context under "userID".
// Any failure aborts the request with 401.
func AuthMiddleware() gin.HandlerFunc {
	return func(c *gin.Context) {
		authHeader := c.GetHeader("Authorization")
		if authHeader == "" {
			c.JSON(http.StatusUnauthorized, gin.H{"error": "Authorization header is required"})
			c.Abort()
			return
		}

		// Expect exactly "Bearer <token>".
		headerParts := strings.Split(authHeader, " ")
		if len(headerParts) != 2 || headerParts[0] != "Bearer" {
			c.JSON(http.StatusUnauthorized, gin.H{"error": "Invalid Authorization header format"})
			c.Abort()
			return
		}

		tokenString := headerParts[1]
		userID, err := utils.ValidateToken(tokenString)
		if err != nil {
			c.JSON(http.StatusUnauthorized, gin.H{"error": "Invalid token"})
			c.Abort()
			return
		}

		// Downstream handlers read this via c.Get("userID").
		c.Set("userID", userID)
		c.Next()
	}
}
diff --git a/backend/middleware/cors.go b/backend/middleware/cors.go
new file mode 100644
index 0000000..1178cf7
--- /dev/null
+++ b/backend/middleware/cors.go
@@ -0,0 +1,22 @@
package middleware

import (
	"github.com/gin-gonic/gin"
)

// CORSMiddleware handles 
Cross-Origin Resource Sharing (CORS) headers
// NOTE(review): Access-Control-Allow-Origin "*" combined with
// Access-Control-Allow-Credentials "true" is rejected by browsers for
// credentialed requests (the Fetch spec forbids a wildcard origin with
// credentials); echo the request's Origin instead if credentials are needed.
func CORSMiddleware() gin.HandlerFunc {
	return func(c *gin.Context) {
		c.Writer.Header().Set("Access-Control-Allow-Origin", "*")
		c.Writer.Header().Set("Access-Control-Allow-Credentials", "true")
		c.Writer.Header().Set("Access-Control-Allow-Headers", "Content-Type, Content-Length, Accept-Encoding, X-CSRF-Token, Authorization, accept, origin, Cache-Control, X-Requested-With, X-Request-ID")
		c.Writer.Header().Set("Access-Control-Allow-Methods", "POST, OPTIONS, GET, PUT, DELETE")

		// Answer preflight requests directly with 204 No Content.
		if c.Request.Method == "OPTIONS" {
			c.AbortWithStatus(204)
			return
		}

		c.Next()
	}
}
diff --git a/backend/models/activity.go b/backend/models/activity.go
new file mode 100644
index 0000000..ed56da9
--- /dev/null
+++ b/backend/models/activity.go
@@ -0,0 +1,60 @@
package models

import (
	"time"

	"go.mongodb.org/mongo-driver/bson/primitive"
)

// UserActivity represents a single activity record
type UserActivity struct {
	ID        primitive.ObjectID `bson:"_id,omitempty" json:"id,omitempty"`
	UserID    primitive.ObjectID `bson:"user_id" json:"user_id"`
	Type      string             `bson:"type" json:"type"`           // e.g., "commit", "test", "pull_request"
	Count     int                `bson:"count" json:"count"`         // Number of actions for that day
	RepoName  string             `bson:"repo_name" json:"repo_name"` // Related repository name
	Date      time.Time          `bson:"date" json:"date"`           // The date of activity
	CreatedAt time.Time          `bson:"created_at" json:"created_at"`
}

// ActivitySummary represents aggregated activity data for the UI
type ActivitySummary struct {
	DailyActivities []DailyActivity `json:"dailyActivities"`
	TotalCount      int             `json:"totalCount"`
	MaxCount        int             `json:"maxCount"`
	RepoBreakdown   []RepoActivity  `json:"repoBreakdown"`
	RecentActivity  []Activity      `json:"recentActivity"`
}

// GitHubContributionResponse represents the response from GitHub Contributions API
type GitHubContributionResponse struct {
	Total         TotalContributions `json:"total"`
	Contributions 
[]DailyActivity `json:"contributions"`
}

// TotalContributions represents the total count of contributions
type TotalContributions struct {
	LastYear int `json:"lastYear"`
}

// DailyActivity is one day's contribution cell.
type DailyActivity struct {
	Date  string `json:"date"`  // Format: "YYYY-MM-DD"
	Count int    `json:"count"` // Activity count for the day
	Level int    `json:"level"` // intensity bucket — range not shown here; presumably 0-4 as in GitHub's graph, TODO confirm
}

// RepoActivity represents activity breakdown by repository
type RepoActivity struct {
	RepoName string `json:"repoName"`
	Count    int    `json:"count"`
}

// Activity represents a single activity item for display
type Activity struct {
	ID        string             `bson:"_id,omitempty" json:"id"`
	UserID    primitive.ObjectID `bson:"user_id" json:"user_id,omitempty"`
	Type      string             `bson:"type" json:"type"`
	RepoName  string             `bson:"repo_name" json:"repoName"`
	Message   string             `bson:"message" json:"message"`
	Timestamp time.Time          `bson:"timestamp" json:"timestamp"`
}
diff --git a/backend/models/coverage.go b/backend/models/coverage.go
new file mode 100644
index 0000000..0e62066
--- /dev/null
+++ b/backend/models/coverage.go
@@ -0,0 +1,24 @@
package models

import (
	"time"

	"go.mongodb.org/mongo-driver/bson/primitive"
)

// CoverageHistory represents a historical record of coverage scans
type CoverageHistory struct {
	ID            primitive.ObjectID `bson:"_id,omitempty" json:"id"`
	Repository    string             `bson:"repository" json:"repository"`
	Branch        string             `bson:"branch" json:"branch"`
	TotalCoverage float64            `bson:"total_coverage" json:"total_coverage"`
	Files         []FileCoverage     `bson:"files" json:"files"`
	Timestamp     time.Time          `bson:"timestamp" json:"timestamp"`
	CommitHash    string             `bson:"commit_hash,omitempty" json:"commit_hash,omitempty"`
}

// FileCoverage represents coverage data for a single file
type FileCoverage struct {
	File     string  `bson:"file" json:"file"`
	Coverage float64 `bson:"coverage" json:"coverage"`
}
diff --git a/backend/models/repository.go b/backend/models/repository.go
new file mode 100644
index 
0000000..d6df2cb
--- /dev/null
+++ b/backend/models/repository.go
@@ -0,0 +1,42 @@
package models

import (
	"time"

	"go.mongodb.org/mongo-driver/bson/primitive"
)

// Repository is the persisted, per-user copy of a GitHub repository,
// enriched with cached language statistics.
type Repository struct {
	ID          primitive.ObjectID `bson:"_id,omitempty" json:"id,omitempty"`
	Name        string             `bson:"name" json:"name"`
	FullName    string             `bson:"full_name" json:"full_name"`
	Description string             `bson:"description" json:"description"`
	URL         string             `bson:"url" json:"url"`
	HTMLURL     string             `bson:"html_url" json:"html_url"`
	Owner       string             `bson:"owner" json:"owner"`
	GitHubID    int64              `bson:"github_id" json:"github_id"`
	Private     bool               `bson:"private" json:"private"`
	Status      string             `bson:"status" json:"status"`
	UserID      primitive.ObjectID `bson:"user_id,omitempty" json:"user_id,omitempty"`
	Languages   map[string]float64 `bson:"languages" json:"languages"`       // language name -> percentage of bytes
	LastFetched time.Time          `bson:"last_fetched" json:"last_fetched"` // when language data was last pulled from GitHub
	CreatedAt   time.Time          `bson:"created_at" json:"created_at"`
	UpdatedAt   time.Time          `bson:"updated_at" json:"updated_at"`
}

// GitHubRepository mirrors the subset of the GitHub REST API repository
// object that this service consumes.
type GitHubRepository struct {
	ID          int64  `json:"id"`
	Name        string `json:"name"`
	FullName    string `json:"full_name"`
	Description string `json:"description"`
	URL         string `json:"url"`
	HTMLURL     string `json:"html_url"`
	Private     bool   `json:"private"`
	Owner       struct {
		Login string `json:"login"`
	} `json:"owner"`
}

// RepositoryResponse wraps a list of repositories for JSON responses.
type RepositoryResponse struct {
	Repositories []Repository `json:"repositories"`
}
diff --git a/backend/models/user.go b/backend/models/user.go
new file mode 100644
index 0000000..97da3da
--- /dev/null
+++ b/backend/models/user.go
@@ -0,0 +1,45 @@
package models

import (
	"time"

	"go.mongodb.org/mongo-driver/bson/primitive"
)

// User is the persisted account record. It carries the GitHub OAuth tokens;
// the profile handler strips access_token/refresh_token before responding.
type User struct {
	ID        primitive.ObjectID `bson:"_id,omitempty" json:"id,omitempty"`
	GitHubID  int64              `bson:"github_id" json:"github_id"`
	Username  string             `bson:"username" json:"username"`
	Email     string             `bson:"email" json:"email"`
	Name      string             `bson:"name" json:"name"`
	AvatarURL 
string `bson:"avatar_url" json:"avatar_url"` + AccessToken string `bson:"access_token" json:"access_token,omitempty"` + RefreshToken string `bson:"refresh_token" json:"refresh_token,omitempty"` + CreatedAt time.Time `bson:"created_at" json:"created_at"` + UpdatedAt time.Time `bson:"updated_at" json:"updated_at"` +} + +type GitHubAuthRequest struct { + Code string `json:"code" binding:"required"` +} + +type AuthResponse struct { + Token string `json:"token"` + User User `json:"user"` +} + +type GitHubUser struct { + ID int64 `json:"id"` + Login string `json:"login"` + Name string `json:"name"` + Email string `json:"email"` + AvatarURL string `json:"avatar_url"` +} + +type GitHubTokenResponse struct { + AccessToken string `json:"access_token"` + TokenType string `json:"token_type"` + Scope string `json:"scope"` + RefreshToken string `json:"refresh_token,omitempty"` +} diff --git a/backend/pythonutils/pythonutils.go b/backend/pythonutils/pythonutils.go new file mode 100644 index 0000000..981bfba --- /dev/null +++ b/backend/pythonutils/pythonutils.go @@ -0,0 +1,1135 @@ +package pythonutils + +import ( + "bufio" + "errors" + "log" + "math" + "os" + "os/exec" + "path/filepath" + "regexp" + "runtime" + "strconv" + "strings" +) + +type FileCoverage struct { + File string `json:"file"` + Coverage float64 `json:"coverage"` +} +type PythonFileStats struct { + TotalExecutableLines int + CoveredLines int + MissedLines int +} + +type CoverageResponse struct { + TotalCoverage float64 `json:"total_coverage"` + Files []FileCoverage `json:"files"` + ID string `json:"id,omitempty"` + Repository string `json:"repository,omitempty"` + Branch string `json:"branch,omitempty"` + Timestamp string `json:"timestamp,omitempty"` + CommitHash string `json:"commit_hash,omitempty"` +} + +type PythonProjectType int + +const ( + UnknownProject PythonProjectType = iota + PipProject + PoetryProject + SetupPyProject + PipenvProject + CondaProject +) + +type PythonProjectInfo struct { + Type 
PythonProjectType + HasPoetry bool + HasPipfile bool + HasSetupPy bool + HasRequirements bool + HasPyProjectToml bool + HasPoetryLock bool + HasCondaEnv bool + PythonPath string + WorkingDir string +} + +// Check if a file exists and is not a directory +func FileExists(path string) bool { + info, err := os.Stat(path) + return err == nil && !info.IsDir() +} + +// Analyze a directory to determine Python project type and configuration +func DetectPythonProjectInfo(dir string, logPrefix string) *PythonProjectInfo { + log.Printf("INFO: %s Analyzing Python project structure in %s", logPrefix, dir) + + info := &PythonProjectInfo{ + Type: UnknownProject, + WorkingDir: dir, + } + info.HasPyProjectToml = FileExists(filepath.Join(dir, "pyproject.toml")) + info.HasPoetryLock = FileExists(filepath.Join(dir, "poetry.lock")) + info.HasRequirements = FileExists(filepath.Join(dir, "requirements.txt")) + info.HasSetupPy = FileExists(filepath.Join(dir, "setup.py")) + info.HasPipfile = FileExists(filepath.Join(dir, "Pipfile")) + info.HasCondaEnv = FileExists(filepath.Join(dir, "environment.yml")) || FileExists(filepath.Join(dir, "conda.yml")) + if info.HasPyProjectToml { + if content, err := os.ReadFile(filepath.Join(dir, "pyproject.toml")); err == nil { + if strings.Contains(string(content), "[tool.poetry]") || + strings.Contains(string(content), "poetry-core") || + strings.Contains(string(content), "poetry.core") { + info.HasPoetry = true + } + } + } + + // Determinaton of the project type priority: + if info.HasPoetry { + info.Type = PoetryProject + log.Printf("INFO: %s Detected Poetry project", logPrefix) + } else if info.HasPipfile { + info.Type = PipenvProject + log.Printf("INFO: %s Detected Pipenv project", logPrefix) + } else if info.HasCondaEnv { + info.Type = CondaProject + log.Printf("INFO: %s Detected Conda project", logPrefix) + } else if info.HasSetupPy { + info.Type = SetupPyProject + log.Printf("INFO: %s Detected setup.py project", logPrefix) + } else if 
info.HasRequirements { + info.Type = PipProject + log.Printf("INFO: %s Detected pip/requirements.txt project", logPrefix) + } + info.PythonPath = FindPythonExecutable(dir) + + return info +} + +// Locate Poetry executable +func FindPoetryExecutable() string { + log.Printf("INFO: Looking for Poetry executable") + + poetryCommands := []string{ + "poetry", + "/usr/local/bin/poetry", + "/usr/bin/poetry", + "~/.local/bin/poetry", + } + + for _, cmd := range poetryCommands { + if strings.HasPrefix(cmd, "~/") { + if home, err := os.UserHomeDir(); err == nil { + cmd = filepath.Join(home, cmd[2:]) + } + } + + checkCmd := exec.Command("which", cmd) + if out, err := checkCmd.Output(); err == nil { + poetryPath := strings.TrimSpace(string(out)) + log.Printf("INFO: Found Poetry at %s", poetryPath) + return poetryPath + } + } + + // Try direct execution + if err := exec.Command("poetry", "--version").Run(); err == nil { + log.Printf("INFO: Found Poetry in PATH") + return "poetry" + } + + log.Printf("WARNING: Could not find Poetry executable") + return "" +} + +// Locate Pipenv executable +func FindPipenvExecutable() string { + log.Printf("INFO: Looking for Pipenv executable") + + pipenvCommands := []string{ + "pipenv", + "/usr/local/bin/pipenv", + "/usr/bin/pipenv", + } + + for _, cmd := range pipenvCommands { + checkCmd := exec.Command("which", cmd) + if out, err := checkCmd.Output(); err == nil { + pipenvPath := strings.TrimSpace(string(out)) + log.Printf("INFO: Found Pipenv at %s", pipenvPath) + return pipenvPath + } + } + + if err := exec.Command("pipenv", "--version").Run(); err == nil { + log.Printf("INFO: Found Pipenv in PATH") + return "pipenv" + } + + log.Printf("WARNING: Could not find Pipenv executable") + return "" +} + +// Locate Conda executable +func FindCondaExecutable() string { + log.Printf("INFO: Looking for Conda executable") + + condaCommands := []string{ + "conda", + "mamba", + "/usr/local/bin/conda", + "/usr/bin/conda", + "~/miniconda3/bin/conda", + 
"~/anaconda3/bin/conda", + } + + for _, cmd := range condaCommands { + if strings.HasPrefix(cmd, "~/") { + if home, err := os.UserHomeDir(); err == nil { + cmd = filepath.Join(home, cmd[2:]) + } + } + + checkCmd := exec.Command("which", cmd) + if out, err := checkCmd.Output(); err == nil { + condaPath := strings.TrimSpace(string(out)) + log.Printf("INFO: Found Conda at %s", condaPath) + return condaPath + } + } + + log.Printf("WARNING: Could not find Conda executable") + return "" +} + +// Find Python executable in system or venv +func FindPythonExecutable(dir string) string { + log.Printf("INFO: Looking for Python executable") + + pythonCommands := []string{"python", "python3", "/usr/bin/python", "/usr/bin/python3", + "/usr/local/bin/python", "/usr/local/bin/python3"} + + for _, cmd := range pythonCommands { + checkCmd := exec.Command("which", cmd) + if out, err := checkCmd.Output(); err == nil { + pythonPath := strings.TrimSpace(string(out)) + log.Printf("INFO: Found Python at %s", pythonPath) + return pythonPath + } + } + + venvPaths := []string{ + filepath.Join(dir, "venv", "bin", "python"), + filepath.Join(dir, ".venv", "bin", "python"), + filepath.Join(dir, "env", "bin", "python"), + } + + for _, path := range venvPaths { + if FileExists(path) { + log.Printf("INFO: Found Python in virtual environment at %s", path) + return path + } + } + log.Printf("WARNING: Could not find Python executable") + return "" +} + +// Find pip executable in system or venv +func FindPipExecutable(dir string, pythonPath string) string { + log.Printf("INFO: Looking for pip executable") + + if pythonPath != "" { + checkCmd := exec.Command(pythonPath, "-m", "pip", "--version") + if err := checkCmd.Run(); err == nil { + log.Printf("INFO: Found pip via %s -m pip", pythonPath) + return pythonPath + " -m pip" + } + } + + pipCommands := []string{"pip", "pip3", "/usr/bin/pip", "/usr/bin/pip3", + "/usr/local/bin/pip", "/usr/local/bin/pip3"} + + for _, cmd := range pipCommands { + checkCmd := 
exec.Command("which", cmd) + if out, err := checkCmd.Output(); err == nil { + pipPath := strings.TrimSpace(string(out)) + log.Printf("INFO: Found pip at %s", pipPath) + return pipPath + } + } + venvPaths := []string{ + filepath.Join(dir, "venv", "bin", "pip"), + filepath.Join(dir, ".venv", "bin", "pip"), + filepath.Join(dir, "env", "bin", "pip"), + } + + for _, path := range venvPaths { + if FileExists(path) { + log.Printf("INFO: Found pip in virtual environment at %s", path) + return path + } + } + + log.Printf("WARNING: Could not find pip executable") + return "" +} + +// Install dependencies using Poetry +func InstallPoetryDependencies(dir string, logPrefix string) error { + poetryPath := FindPoetryExecutable() + if poetryPath == "" { + return errors.New("poetry executable not found") + } + + log.Printf("INFO: %s Installing dependencies with Poetry", logPrefix) + installCmd := exec.Command(poetryPath, "install") + installCmd.Dir = dir + installOut, installErr := installCmd.CombinedOutput() + + if installErr != nil { + log.Printf("WARNING: %s Poetry install failed: %v, output: %s", logPrefix, installErr, string(installOut)) + + // Try installing with --no-dev flag + log.Printf("INFO: %s Retrying Poetry install without dev dependencies", logPrefix) + installCmd = exec.Command(poetryPath, "install", "--no-dev") + installCmd.Dir = dir + installOut, installErr = installCmd.CombinedOutput() + + if installErr != nil { + log.Printf("ERROR: %s Poetry install failed even without dev dependencies: %v, output: %s", + logPrefix, installErr, string(installOut)) + return installErr + } + } + + log.Printf("INFO: %s Successfully installed Poetry dependencies", logPrefix) + log.Printf("INFO: %s Adding coverage dependencies via Poetry", logPrefix) + addCmd := exec.Command(poetryPath, "add", "--dev", "coverage", "pytest", "pytest-cov") + addCmd.Dir = dir + addOut, addErr := addCmd.CombinedOutput() + + if addErr != nil { + log.Printf("WARNING: %s Failed to add coverage dependencies: 
%v, output: %s", + logPrefix, addErr, string(addOut)) + } else { + log.Printf("INFO: %s Successfully added coverage dependencies", logPrefix) + } + + return nil +} + +// Install dependencies using Pipenv +func InstallPipenvDependencies(dir string, logPrefix string) error { + pipenvPath := FindPipenvExecutable() + if pipenvPath == "" { + return errors.New("pipenv executable not found") + } + + log.Printf("INFO: %s Installing dependencies with Pipenv", logPrefix) + installCmd := exec.Command(pipenvPath, "install", "--dev") + installCmd.Dir = dir + installOut, installErr := installCmd.CombinedOutput() + + if installErr != nil { + log.Printf("WARNING: %s Pipenv install failed: %v, output: %s", logPrefix, installErr, string(installOut)) + installCmd = exec.Command(pipenvPath, "install") + installCmd.Dir = dir + installOut, installErr = installCmd.CombinedOutput() + + if installErr != nil { + log.Printf("ERROR: %s Pipenv install failed: %v, output: %s", logPrefix, installErr, string(installOut)) + return installErr + } + } + + log.Printf("INFO: %s Successfully installed Pipenv dependencies", logPrefix) + log.Printf("INFO: %s Installing coverage with Pipenv", logPrefix) + coverageCmd := exec.Command(pipenvPath, "install", "coverage", "pytest", "pytest-cov", "--dev") + coverageCmd.Dir = dir + coverageOut, coverageErr := coverageCmd.CombinedOutput() + + if coverageErr != nil { + log.Printf("WARNING: %s Failed to install coverage with Pipenv: %v, output: %s", + logPrefix, coverageErr, string(coverageOut)) + } else { + log.Printf("INFO: %s Successfully installed coverage with Pipenv", logPrefix) + } + + return nil +} + +// Install dependencies using Conda +func InstallCondaDependencies(dir string, logPrefix string) error { + condaPath := FindCondaExecutable() + if condaPath == "" { + return errors.New("conda executable not found") + } + + log.Printf("INFO: %s Installing dependencies with Conda", logPrefix) + + envFile := filepath.Join(dir, "environment.yml") + if 
!FileExists(envFile) { + envFile = filepath.Join(dir, "conda.yml") + } + + if FileExists(envFile) { + installCmd := exec.Command(condaPath, "env", "create", "-f", envFile) + installCmd.Dir = dir + installOut, installErr := installCmd.CombinedOutput() + + if installErr != nil { + log.Printf("WARNING: %s Conda env create failed: %v, output: %s", logPrefix, installErr, string(installOut)) + } else { + log.Printf("INFO: %s Successfully created Conda environment", logPrefix) + } + } + coverageCmd := exec.Command(condaPath, "install", "-c", "conda-forge", "coverage", "pytest", "pytest-cov", "-y") + coverageCmd.Dir = dir + coverageOut, coverageErr := coverageCmd.CombinedOutput() + + if coverageErr != nil { + log.Printf("WARNING: %s Failed to install coverage with Conda: %v, output: %s", + logPrefix, coverageErr, string(coverageOut)) + } else { + log.Printf("INFO: %s Successfully installed coverage with Conda", logPrefix) + } + + return nil +} + +// Detect Python test frameworks in a project +func DetectPythonTestFrameworks(dir string, pythonPath string) []string { + log.Printf("INFO: Detecting Python test frameworks") + frameworks := []string{} + + if pythonPath == "" { + return frameworks + } + if files, err := filepath.Glob(filepath.Join(dir, "**/*test*.py")); err == nil && len(files) > 0 { + for _, file := range files { + content, err := os.ReadFile(file) + if err == nil { + if strings.Contains(string(content), "import pytest") || + strings.Contains(string(content), "from pytest") { + frameworks = append(frameworks, "pytest") + break + } + } + } + + for _, file := range files { + content, err := os.ReadFile(file) + if err == nil { + if strings.Contains(string(content), "unittest.TestCase") || + strings.Contains(string(content), "import unittest") { + frameworks = append(frameworks, "unittest") + break + } + } + } + } + reqFiles := []string{ + "requirements.txt", "requirements-test.txt", "requirements-dev.txt", + "pyproject.toml", "Pipfile", "setup.py", + } + + for _, 
reqFile := range reqFiles { + reqPath := filepath.Join(dir, reqFile) + if FileExists(reqPath) { + content, err := os.ReadFile(reqPath) + if err == nil { + contentStr := string(content) + if strings.Contains(contentStr, "pytest") { + frameworks = append(frameworks, "pytest") + } + if strings.Contains(contentStr, "unittest") { + frameworks = append(frameworks, "unittest") + } + if strings.Contains(contentStr, "coverage") || + strings.Contains(contentStr, "pytest-cov") { + frameworks = append(frameworks, "coverage") + } + } + } + } + frameworksMap := make(map[string]bool) + for _, f := range frameworks { + frameworksMap[f] = true + } + + frameworks = []string{} + for f := range frameworksMap { + frameworks = append(frameworks, f) + } + + log.Printf("INFO: Detected Python test frameworks: %v", frameworks) + return frameworks +} + +// Check if a directory contains a Python project +func DetectPythonProject(dir string) bool { + log.Printf("INFO: Detecting if %s is a Python project", dir) + pythonFiles := []string{ + "setup.py", + "requirements.txt", + "pyproject.toml", + "Pipfile", + "Pipfile.lock", + "poetry.lock", + "tox.ini", + "environment.yml", + "conda.yml", + } + + for _, file := range pythonFiles { + fullPath := filepath.Join(dir, file) + if FileExists(fullPath) { + log.Printf("INFO: Python project detected by presence of %s", file) + return true + } + } + + pyFiles, err := filepath.Glob(filepath.Join(dir, "*.py")) + if err != nil { + log.Printf("WARNING: Error checking for Python files: %v", err) + } else { + if len(pyFiles) > 0 { + log.Printf("INFO: Python project detected by presence of .py files: %d found", len(pyFiles)) + return true + } + } + + dirs, err := os.ReadDir(dir) + if err == nil { + for _, entry := range dirs { + if entry.IsDir() { + initPath := filepath.Join(dir, entry.Name(), "__init__.py") + if FileExists(initPath) { + log.Printf("INFO: Python project detected by presence of package directory with __init__.py: %s", entry.Name()) + return true + 
// Contains reports whether str is present in slice.
func Contains(slice []string, str string) bool {
	for i := range slice {
		if slice[i] == str {
			return true
		}
	}
	return false
}
poetryPath := FindPoetryExecutable() + if poetryPath == "" { + return CoverageResponse{}, errors.New("poetry executable not found") + } + if err := InstallPoetryDependencies(dir, logPrefix); err != nil { + log.Printf("WARNING: %s Failed to install Poetry dependencies: %v", logPrefix, err) + } + + var runErr error + var runOut []byte + if Contains(frameworks, "pytest") || len(frameworks) == 0 { + log.Printf("INFO: %s Running pytest with Poetry coverage", logPrefix) + runCmd := exec.Command(poetryPath, "run", "coverage", "run", "--source=.", "-m", "pytest") + runCmd.Dir = dir + runOut, runErr = runCmd.CombinedOutput() + + if runErr != nil { + log.Printf("WARNING: %s Poetry coverage with pytest failed: %v, output: %s", logPrefix, runErr, string(runOut)) + runCmd = exec.Command(poetryPath, "run", "pytest", "--cov=.", "--cov-report=term") + runCmd.Dir = dir + runOut, runErr = runCmd.CombinedOutput() + + if runErr == nil { + log.Printf("INFO: %s Poetry pytest-cov succeeded", logPrefix) + return parsePytestCovOutput(string(runOut), logPrefix) + } + } else { + log.Printf("INFO: %s Poetry coverage run with pytest succeeded", logPrefix) + } + } + if runErr != nil || Contains(frameworks, "unittest") { + log.Printf("INFO: %s Trying unittest with Poetry", logPrefix) + runCmd := exec.Command(poetryPath, "run", "coverage", "run", "--source=.", "-m", "unittest", "discover") + runCmd.Dir = dir + runOut, runErr = runCmd.CombinedOutput() + + if runErr != nil { + log.Printf("WARNING: %s Poetry coverage with unittest failed: %v, output: %s", logPrefix, runErr, string(runOut)) + } else { + log.Printf("INFO: %s Poetry coverage run with unittest succeeded", logPrefix) + } + } + + if runErr == nil { + reportCmd := exec.Command(poetryPath, "run", "coverage", "report") + reportCmd.Dir = dir + reportOut, reportErr := reportCmd.CombinedOutput() + + if reportErr != nil { + log.Printf("WARNING: %s Failed to generate Poetry coverage report: %v", logPrefix, reportErr) + } else { + return 
parseCoverageReport(string(reportOut), logPrefix) + } + } + + return CoverageResponse{}, runErr +} + +// Run coverage tests using Pipenv +func RunCoverageWithPipenv(dir string, logPrefix string, frameworks []string) (CoverageResponse, error) { + log.Printf("INFO: %s Running coverage with Pipenv", logPrefix) + + pipenvPath := FindPipenvExecutable() + if pipenvPath == "" { + return CoverageResponse{}, errors.New("pipenv executable not found") + } + + if err := InstallPipenvDependencies(dir, logPrefix); err != nil { + log.Printf("WARNING: %s Failed to install Pipenv dependencies: %v", logPrefix, err) + } + + var runErr error + var runOut []byte + + if Contains(frameworks, "pytest") || len(frameworks) == 0 { + log.Printf("INFO: %s Running pytest with Pipenv coverage", logPrefix) + + runCmd := exec.Command(pipenvPath, "run", "coverage", "run", "--source=.", "-m", "pytest") + runCmd.Dir = dir + runOut, runErr = runCmd.CombinedOutput() + + if runErr != nil { + log.Printf("WARNING: %s Pipenv coverage with pytest failed: %v, output: %s", logPrefix, runErr, string(runOut)) + + // Try pytest-cov + runCmd = exec.Command(pipenvPath, "run", "pytest", "--cov=.", "--cov-report=term") + runCmd.Dir = dir + runOut, runErr = runCmd.CombinedOutput() + + if runErr == nil { + return parsePytestCovOutput(string(runOut), logPrefix) + } + } else { + log.Printf("INFO: %s Pipenv coverage run with pytest succeeded", logPrefix) + } + } + + if runErr != nil || Contains(frameworks, "unittest") { + log.Printf("INFO: %s Trying unittest with Pipenv", logPrefix) + runCmd := exec.Command(pipenvPath, "run", "coverage", "run", "--source=.", "-m", "unittest", "discover") + runCmd.Dir = dir + runOut, runErr = runCmd.CombinedOutput() + + if runErr != nil { + log.Printf("WARNING: %s Pipenv coverage with unittest failed: %v, output: %s", logPrefix, runErr, string(runOut)) + } else { + log.Printf("INFO: %s Pipenv coverage run with unittest succeeded", logPrefix) + } + } + + if runErr == nil { + reportCmd := 
exec.Command(pipenvPath, "run", "coverage", "report") + reportCmd.Dir = dir + reportOut, reportErr := reportCmd.CombinedOutput() + + if reportErr != nil { + log.Printf("WARNING: %s Failed to generate Pipenv coverage report: %v", logPrefix, reportErr) + } else { + return parseCoverageReport(string(reportOut), logPrefix) + } + } + + return CoverageResponse{}, runErr +} + +func parsePytestCovOutput(output string, logPrefix string) (CoverageResponse, error) { + log.Printf("INFO: %s Parsing pytest-cov output", logPrefix) + + re := regexp.MustCompile(`TOTAL\s+\d+\s+\d+\s+(\d+)%`) + matches := re.FindStringSubmatch(output) + + if len(matches) < 2 { + re = regexp.MustCompile(`Total coverage:\s*(\d+(?:\.\d+)?)%`) + matches = re.FindStringSubmatch(output) + } + + if len(matches) >= 2 { + if totalCov, err := strconv.ParseFloat(matches[1], 64); err == nil { + return CoverageResponse{TotalCoverage: totalCov, Files: []FileCoverage{}}, nil + } + } + + return CoverageResponse{}, errors.New("failed to parse pytest-cov output") +} + +// Parse coverage report output to extract total coverage and file details +func parseCoverageReport(output string, logPrefix string) (CoverageResponse, error) { + log.Printf("INFO: %s Parsing coverage report", logPrefix) + re := regexp.MustCompile(`TOTAL\s+\d+\s+\d+\s+(\d+)%`) + matches := re.FindStringSubmatch(output) + + if len(matches) < 2 { + log.Printf("WARNING: %s No total coverage found in report", logPrefix) + return CoverageResponse{}, errors.New("no coverage data found in report") + } + + totalCov, err := strconv.ParseFloat(matches[1], 64) + if err != nil { + return CoverageResponse{}, err + } + + log.Printf("INFO: %s Successfully extracted total coverage: %.2f%%", logPrefix, totalCov) + var files []FileCoverage + scanner := bufio.NewScanner(strings.NewReader(output)) + for scanner.Scan() { + line := scanner.Text() + if strings.HasPrefix(line, "Name") || + strings.HasPrefix(line, "----") || + strings.HasPrefix(line, "TOTAL") || + 
strings.TrimSpace(line) == "" { + continue + } + fields := strings.Fields(line) + if len(fields) >= 4 { + fileName := fields[0] + covStr := strings.TrimSuffix(fields[3], "%") + if coverage, err := strconv.ParseFloat(covStr, 64); err == nil { + files = append(files, FileCoverage{ + File: fileName, + Coverage: coverage, + }) + } + } + } + + return CoverageResponse{TotalCoverage: totalCov, Files: files}, nil +} + +// Estimate Python coverage by analyzing files +func EstimatePythonCoverage(dir string, logPrefix string) (CoverageResponse, error) { + log.Printf("INFO: %s Estimating Python coverage by analyzing files", logPrefix) + var pyFiles []string + err := filepath.Walk(dir, func(path string, info os.FileInfo, err error) error { + if err != nil { + return err + } + if strings.HasPrefix(filepath.Base(path), ".") { + if info.IsDir() { + return filepath.SkipDir + } + return nil + } + if !info.IsDir() && strings.HasSuffix(path, ".py") && + !strings.Contains(filepath.Base(path), "test") && !strings.Contains(path, "/tests/") { + pyFiles = append(pyFiles, path) + } + return nil + }) + + if err != nil { + log.Printf("ERROR: %s Failed to walk directory: %v", logPrefix, err) + return CoverageResponse{TotalCoverage: 0, Files: []FileCoverage{}}, errors.New("failed to analyze Python files") + } + + if len(pyFiles) == 0 { + log.Printf("WARNING: %s No Python files found in project", logPrefix) + return CoverageResponse{TotalCoverage: 0, Files: []FileCoverage{}}, errors.New("no Python files found") + } + var testFiles []string + _ = filepath.Walk(dir, func(path string, info os.FileInfo, err error) error { + if err != nil { + return err + } + if strings.HasPrefix(filepath.Base(path), ".") { + if info.IsDir() { + return filepath.SkipDir + } + return nil + } + if !info.IsDir() && strings.HasSuffix(path, ".py") && + (strings.Contains(filepath.Base(path), "test") || strings.Contains(path, "/tests/")) { + testFiles = append(testFiles, path) + } + return nil + }) + + totalFiles := 
float64(len(pyFiles)) + totalTestFiles := float64(len(testFiles)) + + if totalFiles == 0 { + return CoverageResponse{TotalCoverage: 0, Files: []FileCoverage{}}, errors.New("no Python files found") + } + + coverageEstimate := math.Min(100, (totalTestFiles/totalFiles)*50+10) + + log.Printf("INFO: %s Estimated coverage: %.2f%% (based on %d source files and %d test files)", + logPrefix, coverageEstimate, int(totalFiles), int(totalTestFiles)) + + var files []FileCoverage + for _, file := range pyFiles { + relPath, err := filepath.Rel(dir, file) + if err != nil { + relPath = file + } + + hasDedicatedTests := false + baseFilename := filepath.Base(file) + baseNameWithoutExt := strings.TrimSuffix(baseFilename, filepath.Ext(baseFilename)) + testFilename := "test_" + baseNameWithoutExt + ".py" + + for _, testFile := range testFiles { + if strings.HasSuffix(testFile, testFilename) { + hasDedicatedTests = true + break + } + content, err := os.ReadFile(testFile) + if err == nil { + if strings.Contains(string(content), "import "+baseNameWithoutExt) || + strings.Contains(string(content), "from "+baseNameWithoutExt) { + hasDedicatedTests = true + break + } + } + } + + fileCoverage := coverageEstimate + if hasDedicatedTests { + fileCoverage = math.Min(100, fileCoverage+20) + } else { + fileCoverage = math.Max(0, fileCoverage-10) + } + + files = append(files, FileCoverage{ + File: relPath, + Coverage: fileCoverage, + }) + } + + return CoverageResponse{TotalCoverage: coverageEstimate, Files: files}, nil +} + +// Run Python coverage tests on a directory +func RunPythonCoverage(dir string, logPrefix string) (CoverageResponse, error) { + log.Printf("INFO: %s Running comprehensive Python coverage analysis", logPrefix) + + projectInfo := DetectPythonProjectInfo(dir, logPrefix) + if projectInfo.Type == UnknownProject { + log.Printf("WARNING: %s Unknown Python project type, falling back to estimation", logPrefix) + return EstimatePythonCoverage(dir, logPrefix) + } + + frameworks := 
DetectPythonTestFrameworks(dir, projectInfo.PythonPath) + + var result CoverageResponse + var err error + + switch projectInfo.Type { + case PoetryProject: + log.Printf("INFO: %s Processing Poetry project", logPrefix) + result, err = RunCoverageWithPoetry(dir, logPrefix, frameworks) + if err != nil { + log.Printf("WARNING: %s Poetry coverage failed: %v, falling back to pip method", logPrefix, err) + result, err = runStandardPythonCoverage(dir, logPrefix, projectInfo, frameworks) + } + + case PipenvProject: + log.Printf("INFO: %s Processing Pipenv project", logPrefix) + result, err = RunCoverageWithPipenv(dir, logPrefix, frameworks) + if err != nil { + log.Printf("WARNING: %s Pipenv coverage failed: %v, falling back to pip method", logPrefix, err) + result, err = runStandardPythonCoverage(dir, logPrefix, projectInfo, frameworks) + } + + case CondaProject: + log.Printf("INFO: %s Processing Conda project", logPrefix) + if condaErr := InstallCondaDependencies(dir, logPrefix); condaErr != nil { + log.Printf("WARNING: %s Conda setup failed: %v", logPrefix, condaErr) + } + result, err = runStandardPythonCoverage(dir, logPrefix, projectInfo, frameworks) + + case PipProject, SetupPyProject: + log.Printf("INFO: %s Processing standard pip/setup.py project", logPrefix) + result, err = runStandardPythonCoverage(dir, logPrefix, projectInfo, frameworks) + + default: + log.Printf("WARNING: %s Unsupported project type, using estimation", logPrefix) + return EstimatePythonCoverage(dir, logPrefix) + } + + if err != nil { + log.Printf("WARNING: %s All coverage methods failed: %v, falling back to estimation", logPrefix, err) + return EstimatePythonCoverage(dir, logPrefix) + } + + return result, nil +} + +// Run coverage using standard pip/venv approach +func runStandardPythonCoverage(dir string, logPrefix string, projectInfo *PythonProjectInfo, frameworks []string) (CoverageResponse, error) { + log.Printf("INFO: %s Running standard Python coverage", logPrefix) + + pythonPath := 
projectInfo.PythonPath + if pythonPath == "" { + log.Printf("ERROR: %s No Python executable found", logPrefix) + return EstimatePythonCoverage(dir, logPrefix) + } + + pipPath := FindPipExecutable(dir, pythonPath) + + var venvPythonPath, venvPipPath string + var useVenv bool + + if pipPath != "" { + log.Printf("INFO: %s Installing coverage packages using %s", logPrefix, pipPath) + var installCmd *exec.Cmd + if strings.Contains(pipPath, " -m pip") { + parts := strings.Split(pipPath, " ") + args := append(parts[1:], "install", "coverage", "pytest", "pytest-cov") + installCmd = exec.Command(parts[0], args...) + } else { + installCmd = exec.Command(pipPath, "install", "coverage", "pytest", "pytest-cov") + } + installCmd.Dir = dir + installOut, installErr := installCmd.CombinedOutput() + + if installErr != nil && strings.Contains(string(installOut), "externally-managed-environment") { + log.Printf("INFO: %s Detected externally managed Python environment, switching to virtual environment", logPrefix) + venvPython, venvPip, venvErr := CreatePythonVirtualEnv(dir, logPrefix) + if venvErr == nil { + venvPythonPath = venvPython + venvPipPath = venvPip + useVenv = true + log.Printf("INFO: %s Installing packages in virtual environment with %s", logPrefix, venvPipPath) + venvInstallCmd := exec.Command(venvPipPath, "install", "coverage", "pytest", "pytest-cov") + venvInstallCmd.Dir = dir + venvInstallOut, venvInstallErr := venvInstallCmd.CombinedOutput() + if venvInstallErr != nil { + log.Printf("WARNING: %s Failed to install packages in virtual environment: %v, output: %s", + logPrefix, venvInstallErr, string(venvInstallOut)) + } else { + log.Printf("INFO: %s Successfully installed packages in virtual environment", logPrefix) + } + } else { + log.Printf("WARNING: %s Failed to create virtual environment: %v", logPrefix, venvErr) + } + } else if installErr != nil { + log.Printf("WARNING: %s Failed to install coverage packages: %v, output: %s", logPrefix, installErr, 
string(installOut)) + } else { + log.Printf("INFO: %s Successfully installed coverage packages", logPrefix) + } + } else { + log.Printf("WARNING: %s No pip executable found, skipping package installation", logPrefix) + } + activePython := pythonPath + if useVenv && venvPythonPath != "" { + activePython = venvPythonPath + log.Printf("INFO: %s Using virtual environment Python: %s", logPrefix, activePython) + } + + var runErr error + var runOut []byte + coverageStrategies := []struct { + name string + cmd []string + }{ + {"pytest with coverage", []string{activePython, "-m", "coverage", "run", "--source=.", "-m", "pytest"}}, + {"pytest-cov plugin", []string{activePython, "-m", "pytest", "--cov=.", "--cov-report=term"}}, + {"unittest with coverage", []string{activePython, "-m", "coverage", "run", "--source=.", "-m", "unittest", "discover"}}, + {"unittest discovery", []string{activePython, "-m", "unittest", "discover", "-v"}}, + } + + for _, strategy := range coverageStrategies { + if (strategy.name == "pytest with coverage" || strategy.name == "pytest-cov plugin") && + !Contains(frameworks, "pytest") && len(frameworks) > 0 { + continue + } + if (strategy.name == "unittest with coverage" || strategy.name == "unittest discovery") && + !Contains(frameworks, "unittest") && Contains(frameworks, "pytest") { + continue + } + + log.Printf("INFO: %s Trying strategy: %s", logPrefix, strategy.name) + + runCmd := exec.Command(strategy.cmd[0], strategy.cmd[1:]...) 
+ runCmd.Dir = dir + runOut, runErr = runCmd.CombinedOutput() + + if runErr == nil { + log.Printf("INFO: %s Strategy '%s' succeeded", logPrefix, strategy.name) + if strategy.name == "pytest-cov plugin" { + if result, parseErr := parsePytestCovOutput(string(runOut), logPrefix); parseErr == nil { + if useVenv { + cleanupVirtualEnv(dir, logPrefix) + } + return result, nil + } + } + break + } else { + log.Printf("WARNING: %s Strategy '%s' failed: %v, output: %s", logPrefix, strategy.name, runErr, string(runOut)) + } + } + if runErr == nil { + reportCmd := exec.Command(activePython, "-m", "coverage", "report") + reportCmd.Dir = dir + reportOut, reportErr := reportCmd.CombinedOutput() + + if reportErr != nil { + log.Printf("WARNING: %s Failed to generate coverage report: %v, output: %s", logPrefix, reportErr, string(reportOut)) + + // Try alternative report generation methods + if result := tryAlternativeReportMethods(activePython, dir, logPrefix); result != nil { + if useVenv { + cleanupVirtualEnv(dir, logPrefix) + } + return *result, nil + } + } else { + log.Printf("INFO: %s Successfully generated coverage report", logPrefix) + + if result, parseErr := parseCoverageReport(string(reportOut), logPrefix); parseErr == nil { + if useVenv { + cleanupVirtualEnv(dir, logPrefix) + } + return result, nil + } + } + } + if useVenv { + cleanupVirtualEnv(dir, logPrefix) + } + + return runTestFilesDirectly(activePython, dir, logPrefix) +} + +func tryAlternativeReportMethods(pythonPath, dir, logPrefix string) *CoverageResponse { + log.Printf("INFO: %s Trying alternative report generation methods", logPrefix) + covFile := filepath.Join(dir, ".coverage") + if FileExists(covFile) { + log.Printf("INFO: %s Found .coverage file, trying to generate JSON report", logPrefix) + jsonCmd := exec.Command(pythonPath, "-m", "coverage", "json") + jsonCmd.Dir = dir + if jsonErr := jsonCmd.Run(); jsonErr == nil { + jsonFile := filepath.Join(dir, "coverage.json") + if FileExists(jsonFile) { + if 
jsonData, err := os.ReadFile(jsonFile); err == nil { + re := regexp.MustCompile(`"percent_covered":\s*([0-9]+\.?[0-9]*)`) + match := re.FindStringSubmatch(string(jsonData)) + if len(match) >= 2 { + if totalCov, parseErr := strconv.ParseFloat(match[1], 64); parseErr == nil { + log.Printf("INFO: %s Successfully extracted coverage from JSON: %.2f%%", logPrefix, totalCov) + return &CoverageResponse{TotalCoverage: totalCov, Files: []FileCoverage{}} + } + } + } + } + } + + xmlCmd := exec.Command(pythonPath, "-m", "coverage", "xml") + xmlCmd.Dir = dir + xmlCmd.Run() + } + + return nil +} + +func runTestFilesDirectly(pythonPath, dir, logPrefix string) (CoverageResponse, error) { + log.Printf("INFO: %s Attempting to run test files directly", logPrefix) + + // Find test files + testFiles, _ := filepath.Glob(filepath.Join(dir, "test_*.py")) + testDirs, _ := filepath.Glob(filepath.Join(dir, "tests")) + + for _, testDir := range testDirs { + testDirFiles, _ := filepath.Glob(filepath.Join(testDir, "test_*.py")) + testFiles = append(testFiles, testDirFiles...) 
+ } + + if len(testFiles) > 0 { + log.Printf("INFO: %s Found %d test files, trying to run them with coverage", logPrefix, len(testFiles)) + successCount := 0 + + for _, testFile := range testFiles { + testCmd := exec.Command(pythonPath, "-m", "coverage", "run", "-a", "--source=.", testFile) + testCmd.Dir = dir + if testErr := testCmd.Run(); testErr != nil { + log.Printf("WARNING: %s Failed to run test file %s: %v", logPrefix, testFile, testErr) + } else { + log.Printf("INFO: %s Successfully ran test file %s", logPrefix, testFile) + successCount++ + } + } + + if successCount > 0 { + reportCmd := exec.Command(pythonPath, "-m", "coverage", "report") + reportCmd.Dir = dir + if reportOut, reportErr := reportCmd.CombinedOutput(); reportErr == nil { + if result, parseErr := parseCoverageReport(string(reportOut), logPrefix); parseErr == nil { + return result, nil + } + } + } + } + log.Printf("INFO: %s All direct test execution failed, falling back to estimation", logPrefix) + return EstimatePythonCoverage(dir, logPrefix) +} + +// Remove the temporary virtual environment +func cleanupVirtualEnv(dir, logPrefix string) { + venvPath := filepath.Join(dir, ".keploy_venv") + if FileExists(venvPath) { + log.Printf("INFO: %s Cleaning up virtual environment at %s", logPrefix, venvPath) + os.RemoveAll(venvPath) + } +} diff --git a/backend/utils/jwt.go b/backend/utils/jwt.go new file mode 100644 index 0000000..3d3daaa --- /dev/null +++ b/backend/utils/jwt.go @@ -0,0 +1,68 @@ +package utils + +import ( + "fmt" + "os" + "time" + + "github.com/golang-jwt/jwt/v4" + "github.com/google/uuid" + "go.mongodb.org/mongo-driver/bson/primitive" +) + +type JWTClaims struct { + UserID string `json:"user_id"` + jwt.RegisteredClaims +} + +func GenerateToken(userID primitive.ObjectID) (string, error) { + jwtSecret := os.Getenv("JWT_SECRET") + if jwtSecret == "" { + jwtSecret = uuid.New().String() + fmt.Println("Warning: JWT_SECRET not set in environment variables. 
Using a random one for this session.") + } + + claims := JWTClaims{ + userID.Hex(), + jwt.RegisteredClaims{ + ExpiresAt: jwt.NewNumericDate(time.Now().Add(24 * time.Hour)), + IssuedAt: jwt.NewNumericDate(time.Now()), + NotBefore: jwt.NewNumericDate(time.Now()), + Issuer: "github-auth-api", + }, + } + + token := jwt.NewWithClaims(jwt.SigningMethodHS256, claims) + + + tokenString, err := token.SignedString([]byte(jwtSecret)) + if err != nil { + return "", err + } + + return tokenString, nil +} + + +func ValidateToken(tokenString string) (string, error) { + jwtSecret := os.Getenv("JWT_SECRET") + if jwtSecret == "" { + return "", fmt.Errorf("JWT_SECRET not set in environment variables") + } + + token, err := jwt.ParseWithClaims(tokenString, &JWTClaims{}, func(token *jwt.Token) (interface{}, error) { + if _, ok := token.Method.(*jwt.SigningMethodHMAC); !ok { + return nil, fmt.Errorf("unexpected signing method: %v", token.Header["alg"]) + } + return []byte(jwtSecret), nil + }) + if err != nil { + return "", err + } + + if claims, ok := token.Claims.(*JWTClaims); ok && token.Valid { + return claims.UserID, nil + } + + return "", fmt.Errorf("invalid token") +} diff --git a/poc-frontend/.gitignore b/poc-frontend/.gitignore new file mode 100644 index 0000000..5ef6a52 --- /dev/null +++ b/poc-frontend/.gitignore @@ -0,0 +1,41 @@ +# See https://help.github.com/articles/ignoring-files/ for more about ignoring files. 
+ +# dependencies +/node_modules +/.pnp +.pnp.* +.yarn/* +!.yarn/patches +!.yarn/plugins +!.yarn/releases +!.yarn/versions + +# testing +/coverage + +# next.js +/.next/ +/out/ + +# production +/build + +# misc +.DS_Store +*.pem + +# debug +npm-debug.log* +yarn-debug.log* +yarn-error.log* +.pnpm-debug.log* + +# env files (can opt-in for committing if needed) +.env* + +# vercel +.vercel + +# typescript +*.tsbuildinfo +next-env.d.ts diff --git a/poc-frontend/components.json b/poc-frontend/components.json new file mode 100644 index 0000000..ffe928f --- /dev/null +++ b/poc-frontend/components.json @@ -0,0 +1,21 @@ +{ + "$schema": "https://ui.shadcn.com/schema.json", + "style": "new-york", + "rsc": true, + "tsx": true, + "tailwind": { + "config": "", + "css": "src/app/globals.css", + "baseColor": "neutral", + "cssVariables": true, + "prefix": "" + }, + "aliases": { + "components": "@/components", + "utils": "@/lib/utils", + "ui": "@/components/ui", + "lib": "@/lib", + "hooks": "@/hooks" + }, + "iconLibrary": "lucide" +} \ No newline at end of file diff --git a/poc-frontend/next.config.ts b/poc-frontend/next.config.ts new file mode 100644 index 0000000..9992c72 --- /dev/null +++ b/poc-frontend/next.config.ts @@ -0,0 +1,10 @@ +import type { NextConfig } from "next"; + +const nextConfig: NextConfig = { + output: "export", + images: { + domains: ['avatars.githubusercontent.com'], + }, +}; + +export default nextConfig; diff --git a/poc-frontend/package-lock.json b/poc-frontend/package-lock.json new file mode 100644 index 0000000..d97757d --- /dev/null +++ b/poc-frontend/package-lock.json @@ -0,0 +1,2520 @@ +{ + "name": "poc-fe", + "version": "0.1.0", + "lockfileVersion": 3, + "requires": true, + "packages": { + "": { + "name": "poc-fe", + "version": "0.1.0", + "dependencies": { + "axios": "^1.8.4", + "chart.js": "^4.4.8", + "class-variance-authority": "^0.7.1", + "clsx": "^2.1.1", + "lucide-react": "^0.486.0", + "motion": "^12.6.3", + "next": "15.2.4", + "react": "^19.0.0", + 
"react-chartjs-2": "^5.3.0", + "react-cookie": "^8.0.1", + "react-dom": "^19.0.0", + "react-grid-layout": "^1.5.1", + "react-tooltip": "^5.28.1", + "recharts": "^2.15.1", + "simplex-noise": "^4.0.3", + "tailwind-merge": "^3.1.0", + "tw-animate-css": "^1.2.5", + "universal-cookie": "^8.0.1" + }, + "devDependencies": { + "@tailwindcss/postcss": "^4", + "@types/node": "^20", + "@types/react": "^19", + "@types/react-dom": "^19", + "@types/react-grid-layout": "^1.3.5", + "tailwindcss": "^4", + "typescript": "^5" + } + }, + "node_modules/@alloc/quick-lru": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/@alloc/quick-lru/-/quick-lru-5.2.0.tgz", + "integrity": "sha512-UrcABB+4bUrFABwbluTIBErXwvbsU/V7TZWfmbgJfbkwiBuziS9gxdODUyuiecfdGQ85jglMW6juS3+z5TsKLw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/@babel/runtime": { + "version": "7.27.0", + "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.27.0.tgz", + "integrity": "sha512-VtPOkrdPHZsKc/clNqyi9WUA8TINkZ4cGk63UUE3u4pmB2k+ZMQRDuIOagv8UVd6j7k0T3+RRIb7beKTebNbcw==", + "license": "MIT", + "dependencies": { + "regenerator-runtime": "^0.14.0" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@emnapi/runtime": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/@emnapi/runtime/-/runtime-1.4.0.tgz", + "integrity": "sha512-64WYIf4UYcdLnbKn/umDlNjQDSS8AgZrI/R9+x5ilkUVFxXcA1Ebl+gQLc/6mERA4407Xof0R7wEyEuj091CVw==", + "license": "MIT", + "optional": true, + "dependencies": { + "tslib": "^2.4.0" + } + }, + "node_modules/@floating-ui/core": { + "version": "1.7.1", + "resolved": "https://registry.npmjs.org/@floating-ui/core/-/core-1.7.1.tgz", + "integrity": "sha512-azI0DrjMMfIug/ExbBaeDVJXcY0a7EPvPjb2xAJPa4HeimBX+Z18HK8QQR3jb6356SnDDdxx+hinMLcJEDdOjw==", + "license": "MIT", + "dependencies": { + "@floating-ui/utils": "^0.2.9" + } + }, + 
"node_modules/@floating-ui/dom": { + "version": "1.7.1", + "resolved": "https://registry.npmjs.org/@floating-ui/dom/-/dom-1.7.1.tgz", + "integrity": "sha512-cwsmW/zyw5ltYTUeeYJ60CnQuPqmGwuGVhG9w0PRaRKkAyi38BT5CKrpIbb+jtahSwUl04cWzSx9ZOIxeS6RsQ==", + "license": "MIT", + "dependencies": { + "@floating-ui/core": "^1.7.1", + "@floating-ui/utils": "^0.2.9" + } + }, + "node_modules/@floating-ui/utils": { + "version": "0.2.9", + "resolved": "https://registry.npmjs.org/@floating-ui/utils/-/utils-0.2.9.tgz", + "integrity": "sha512-MDWhGtE+eHw5JW7lq4qhc5yRLS11ERl1c7Z6Xd0a58DozHES6EnNNwUWbMiG4J9Cgj053Bhk8zvlhFYKVhULwg==", + "license": "MIT" + }, + "node_modules/@img/sharp-darwin-arm64": { + "version": "0.33.5", + "resolved": "https://registry.npmjs.org/@img/sharp-darwin-arm64/-/sharp-darwin-arm64-0.33.5.tgz", + "integrity": "sha512-UT4p+iz/2H4twwAoLCqfA9UH5pI6DggwKEGuaPy7nCVQ8ZsiY5PIcrRvD1DzuY3qYL07NtIQcWnBSY/heikIFQ==", + "cpu": [ + "arm64" + ], + "license": "Apache-2.0", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": "^18.17.0 || ^20.3.0 || >=21.0.0" + }, + "funding": { + "url": "https://opencollective.com/libvips" + }, + "optionalDependencies": { + "@img/sharp-libvips-darwin-arm64": "1.0.4" + } + }, + "node_modules/@img/sharp-darwin-x64": { + "version": "0.33.5", + "resolved": "https://registry.npmjs.org/@img/sharp-darwin-x64/-/sharp-darwin-x64-0.33.5.tgz", + "integrity": "sha512-fyHac4jIc1ANYGRDxtiqelIbdWkIuQaI84Mv45KvGRRxSAa7o7d1ZKAOBaYbnepLC1WqxfpimdeWfvqqSGwR2Q==", + "cpu": [ + "x64" + ], + "license": "Apache-2.0", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": "^18.17.0 || ^20.3.0 || >=21.0.0" + }, + "funding": { + "url": "https://opencollective.com/libvips" + }, + "optionalDependencies": { + "@img/sharp-libvips-darwin-x64": "1.0.4" + } + }, + "node_modules/@img/sharp-libvips-darwin-arm64": { + "version": "1.0.4", + "resolved": 
"https://registry.npmjs.org/@img/sharp-libvips-darwin-arm64/-/sharp-libvips-darwin-arm64-1.0.4.tgz", + "integrity": "sha512-XblONe153h0O2zuFfTAbQYAX2JhYmDHeWikp1LM9Hul9gVPjFY427k6dFEcOL72O01QxQsWi761svJ/ev9xEDg==", + "cpu": [ + "arm64" + ], + "license": "LGPL-3.0-or-later", + "optional": true, + "os": [ + "darwin" + ], + "funding": { + "url": "https://opencollective.com/libvips" + } + }, + "node_modules/@img/sharp-libvips-darwin-x64": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/@img/sharp-libvips-darwin-x64/-/sharp-libvips-darwin-x64-1.0.4.tgz", + "integrity": "sha512-xnGR8YuZYfJGmWPvmlunFaWJsb9T/AO2ykoP3Fz/0X5XV2aoYBPkX6xqCQvUTKKiLddarLaxpzNe+b1hjeWHAQ==", + "cpu": [ + "x64" + ], + "license": "LGPL-3.0-or-later", + "optional": true, + "os": [ + "darwin" + ], + "funding": { + "url": "https://opencollective.com/libvips" + } + }, + "node_modules/@img/sharp-libvips-linux-arm": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/@img/sharp-libvips-linux-arm/-/sharp-libvips-linux-arm-1.0.5.tgz", + "integrity": "sha512-gvcC4ACAOPRNATg/ov8/MnbxFDJqf/pDePbBnuBDcjsI8PssmjoKMAz4LtLaVi+OnSb5FK/yIOamqDwGmXW32g==", + "cpu": [ + "arm" + ], + "license": "LGPL-3.0-or-later", + "optional": true, + "os": [ + "linux" + ], + "funding": { + "url": "https://opencollective.com/libvips" + } + }, + "node_modules/@img/sharp-libvips-linux-arm64": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/@img/sharp-libvips-linux-arm64/-/sharp-libvips-linux-arm64-1.0.4.tgz", + "integrity": "sha512-9B+taZ8DlyyqzZQnoeIvDVR/2F4EbMepXMc/NdVbkzsJbzkUjhXv/70GQJ7tdLA4YJgNP25zukcxpX2/SueNrA==", + "cpu": [ + "arm64" + ], + "license": "LGPL-3.0-or-later", + "optional": true, + "os": [ + "linux" + ], + "funding": { + "url": "https://opencollective.com/libvips" + } + }, + "node_modules/@img/sharp-libvips-linux-s390x": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/@img/sharp-libvips-linux-s390x/-/sharp-libvips-linux-s390x-1.0.4.tgz", + 
"integrity": "sha512-u7Wz6ntiSSgGSGcjZ55im6uvTrOxSIS8/dgoVMoiGE9I6JAfU50yH5BoDlYA1tcuGS7g/QNtetJnxA6QEsCVTA==", + "cpu": [ + "s390x" + ], + "license": "LGPL-3.0-or-later", + "optional": true, + "os": [ + "linux" + ], + "funding": { + "url": "https://opencollective.com/libvips" + } + }, + "node_modules/@img/sharp-libvips-linux-x64": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/@img/sharp-libvips-linux-x64/-/sharp-libvips-linux-x64-1.0.4.tgz", + "integrity": "sha512-MmWmQ3iPFZr0Iev+BAgVMb3ZyC4KeFc3jFxnNbEPas60e1cIfevbtuyf9nDGIzOaW9PdnDciJm+wFFaTlj5xYw==", + "cpu": [ + "x64" + ], + "license": "LGPL-3.0-or-later", + "optional": true, + "os": [ + "linux" + ], + "funding": { + "url": "https://opencollective.com/libvips" + } + }, + "node_modules/@img/sharp-libvips-linuxmusl-arm64": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/@img/sharp-libvips-linuxmusl-arm64/-/sharp-libvips-linuxmusl-arm64-1.0.4.tgz", + "integrity": "sha512-9Ti+BbTYDcsbp4wfYib8Ctm1ilkugkA/uscUn6UXK1ldpC1JjiXbLfFZtRlBhjPZ5o1NCLiDbg8fhUPKStHoTA==", + "cpu": [ + "arm64" + ], + "license": "LGPL-3.0-or-later", + "optional": true, + "os": [ + "linux" + ], + "funding": { + "url": "https://opencollective.com/libvips" + } + }, + "node_modules/@img/sharp-libvips-linuxmusl-x64": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/@img/sharp-libvips-linuxmusl-x64/-/sharp-libvips-linuxmusl-x64-1.0.4.tgz", + "integrity": "sha512-viYN1KX9m+/hGkJtvYYp+CCLgnJXwiQB39damAO7WMdKWlIhmYTfHjwSbQeUK/20vY154mwezd9HflVFM1wVSw==", + "cpu": [ + "x64" + ], + "license": "LGPL-3.0-or-later", + "optional": true, + "os": [ + "linux" + ], + "funding": { + "url": "https://opencollective.com/libvips" + } + }, + "node_modules/@img/sharp-linux-arm": { + "version": "0.33.5", + "resolved": "https://registry.npmjs.org/@img/sharp-linux-arm/-/sharp-linux-arm-0.33.5.tgz", + "integrity": "sha512-JTS1eldqZbJxjvKaAkxhZmBqPRGmxgu+qFKSInv8moZ2AmT5Yib3EQ1c6gp493HvrvV8QgdOXdyaIBrhvFhBMQ==", + 
"cpu": [ + "arm" + ], + "license": "Apache-2.0", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": "^18.17.0 || ^20.3.0 || >=21.0.0" + }, + "funding": { + "url": "https://opencollective.com/libvips" + }, + "optionalDependencies": { + "@img/sharp-libvips-linux-arm": "1.0.5" + } + }, + "node_modules/@img/sharp-linux-arm64": { + "version": "0.33.5", + "resolved": "https://registry.npmjs.org/@img/sharp-linux-arm64/-/sharp-linux-arm64-0.33.5.tgz", + "integrity": "sha512-JMVv+AMRyGOHtO1RFBiJy/MBsgz0x4AWrT6QoEVVTyh1E39TrCUpTRI7mx9VksGX4awWASxqCYLCV4wBZHAYxA==", + "cpu": [ + "arm64" + ], + "license": "Apache-2.0", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": "^18.17.0 || ^20.3.0 || >=21.0.0" + }, + "funding": { + "url": "https://opencollective.com/libvips" + }, + "optionalDependencies": { + "@img/sharp-libvips-linux-arm64": "1.0.4" + } + }, + "node_modules/@img/sharp-linux-s390x": { + "version": "0.33.5", + "resolved": "https://registry.npmjs.org/@img/sharp-linux-s390x/-/sharp-linux-s390x-0.33.5.tgz", + "integrity": "sha512-y/5PCd+mP4CA/sPDKl2961b+C9d+vPAveS33s6Z3zfASk2j5upL6fXVPZi7ztePZ5CuH+1kW8JtvxgbuXHRa4Q==", + "cpu": [ + "s390x" + ], + "license": "Apache-2.0", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": "^18.17.0 || ^20.3.0 || >=21.0.0" + }, + "funding": { + "url": "https://opencollective.com/libvips" + }, + "optionalDependencies": { + "@img/sharp-libvips-linux-s390x": "1.0.4" + } + }, + "node_modules/@img/sharp-linux-x64": { + "version": "0.33.5", + "resolved": "https://registry.npmjs.org/@img/sharp-linux-x64/-/sharp-linux-x64-0.33.5.tgz", + "integrity": "sha512-opC+Ok5pRNAzuvq1AG0ar+1owsu842/Ab+4qvU879ippJBHvyY5n2mxF1izXqkPYlGuP/M556uh53jRLJmzTWA==", + "cpu": [ + "x64" + ], + "license": "Apache-2.0", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": "^18.17.0 || ^20.3.0 || >=21.0.0" + }, + "funding": { + "url": "https://opencollective.com/libvips" + }, + 
"optionalDependencies": { + "@img/sharp-libvips-linux-x64": "1.0.4" + } + }, + "node_modules/@img/sharp-linuxmusl-arm64": { + "version": "0.33.5", + "resolved": "https://registry.npmjs.org/@img/sharp-linuxmusl-arm64/-/sharp-linuxmusl-arm64-0.33.5.tgz", + "integrity": "sha512-XrHMZwGQGvJg2V/oRSUfSAfjfPxO+4DkiRh6p2AFjLQztWUuY/o8Mq0eMQVIY7HJ1CDQUJlxGGZRw1a5bqmd1g==", + "cpu": [ + "arm64" + ], + "license": "Apache-2.0", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": "^18.17.0 || ^20.3.0 || >=21.0.0" + }, + "funding": { + "url": "https://opencollective.com/libvips" + }, + "optionalDependencies": { + "@img/sharp-libvips-linuxmusl-arm64": "1.0.4" + } + }, + "node_modules/@img/sharp-linuxmusl-x64": { + "version": "0.33.5", + "resolved": "https://registry.npmjs.org/@img/sharp-linuxmusl-x64/-/sharp-linuxmusl-x64-0.33.5.tgz", + "integrity": "sha512-WT+d/cgqKkkKySYmqoZ8y3pxx7lx9vVejxW/W4DOFMYVSkErR+w7mf2u8m/y4+xHe7yY9DAXQMWQhpnMuFfScw==", + "cpu": [ + "x64" + ], + "license": "Apache-2.0", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": "^18.17.0 || ^20.3.0 || >=21.0.0" + }, + "funding": { + "url": "https://opencollective.com/libvips" + }, + "optionalDependencies": { + "@img/sharp-libvips-linuxmusl-x64": "1.0.4" + } + }, + "node_modules/@img/sharp-wasm32": { + "version": "0.33.5", + "resolved": "https://registry.npmjs.org/@img/sharp-wasm32/-/sharp-wasm32-0.33.5.tgz", + "integrity": "sha512-ykUW4LVGaMcU9lu9thv85CbRMAwfeadCJHRsg2GmeRa/cJxsVY9Rbd57JcMxBkKHag5U/x7TSBpScF4U8ElVzg==", + "cpu": [ + "wasm32" + ], + "license": "Apache-2.0 AND LGPL-3.0-or-later AND MIT", + "optional": true, + "dependencies": { + "@emnapi/runtime": "^1.2.0" + }, + "engines": { + "node": "^18.17.0 || ^20.3.0 || >=21.0.0" + }, + "funding": { + "url": "https://opencollective.com/libvips" + } + }, + "node_modules/@img/sharp-win32-ia32": { + "version": "0.33.5", + "resolved": "https://registry.npmjs.org/@img/sharp-win32-ia32/-/sharp-win32-ia32-0.33.5.tgz", + 
"integrity": "sha512-T36PblLaTwuVJ/zw/LaH0PdZkRz5rd3SmMHX8GSmR7vtNSP5Z6bQkExdSK7xGWyxLw4sUknBuugTelgw2faBbQ==", + "cpu": [ + "ia32" + ], + "license": "Apache-2.0 AND LGPL-3.0-or-later", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": "^18.17.0 || ^20.3.0 || >=21.0.0" + }, + "funding": { + "url": "https://opencollective.com/libvips" + } + }, + "node_modules/@img/sharp-win32-x64": { + "version": "0.33.5", + "resolved": "https://registry.npmjs.org/@img/sharp-win32-x64/-/sharp-win32-x64-0.33.5.tgz", + "integrity": "sha512-MpY/o8/8kj+EcnxwvrP4aTJSWw/aZ7JIGR4aBeZkZw5B7/Jn+tY9/VNwtcoGmdT7GfggGIU4kygOMSbYnOrAbg==", + "cpu": [ + "x64" + ], + "license": "Apache-2.0 AND LGPL-3.0-or-later", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": "^18.17.0 || ^20.3.0 || >=21.0.0" + }, + "funding": { + "url": "https://opencollective.com/libvips" + } + }, + "node_modules/@kurkle/color": { + "version": "0.3.4", + "resolved": "https://registry.npmjs.org/@kurkle/color/-/color-0.3.4.tgz", + "integrity": "sha512-M5UknZPHRu3DEDWoipU6sE8PdkZ6Z/S+v4dD+Ke8IaNlpdSQah50lz1KtcFBa2vsdOnwbbnxJwVM4wty6udA5w==", + "license": "MIT" + }, + "node_modules/@next/env": { + "version": "15.2.4", + "resolved": "https://registry.npmjs.org/@next/env/-/env-15.2.4.tgz", + "integrity": "sha512-+SFtMgoiYP3WoSswuNmxJOCwi06TdWE733D+WPjpXIe4LXGULwEaofiiAy6kbS0+XjM5xF5n3lKuBwN2SnqD9g==", + "license": "MIT" + }, + "node_modules/@next/swc-darwin-arm64": { + "version": "15.2.4", + "resolved": "https://registry.npmjs.org/@next/swc-darwin-arm64/-/swc-darwin-arm64-15.2.4.tgz", + "integrity": "sha512-1AnMfs655ipJEDC/FHkSr0r3lXBgpqKo4K1kiwfUf3iE68rDFXZ1TtHdMvf7D0hMItgDZ7Vuq3JgNMbt/+3bYw==", + "cpu": [ + "arm64" + ], + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@next/swc-darwin-x64": { + "version": "15.2.4", + "resolved": "https://registry.npmjs.org/@next/swc-darwin-x64/-/swc-darwin-x64-15.2.4.tgz", + 
"integrity": "sha512-3qK2zb5EwCwxnO2HeO+TRqCubeI/NgCe+kL5dTJlPldV/uwCnUgC7VbEzgmxbfrkbjehL4H9BPztWOEtsoMwew==", + "cpu": [ + "x64" + ], + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@next/swc-linux-arm64-gnu": { + "version": "15.2.4", + "resolved": "https://registry.npmjs.org/@next/swc-linux-arm64-gnu/-/swc-linux-arm64-gnu-15.2.4.tgz", + "integrity": "sha512-HFN6GKUcrTWvem8AZN7tT95zPb0GUGv9v0d0iyuTb303vbXkkbHDp/DxufB04jNVD+IN9yHy7y/6Mqq0h0YVaQ==", + "cpu": [ + "arm64" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@next/swc-linux-arm64-musl": { + "version": "15.2.4", + "resolved": "https://registry.npmjs.org/@next/swc-linux-arm64-musl/-/swc-linux-arm64-musl-15.2.4.tgz", + "integrity": "sha512-Oioa0SORWLwi35/kVB8aCk5Uq+5/ZIumMK1kJV+jSdazFm2NzPDztsefzdmzzpx5oGCJ6FkUC7vkaUseNTStNA==", + "cpu": [ + "arm64" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@next/swc-linux-x64-gnu": { + "version": "15.2.4", + "resolved": "https://registry.npmjs.org/@next/swc-linux-x64-gnu/-/swc-linux-x64-gnu-15.2.4.tgz", + "integrity": "sha512-yb5WTRaHdkgOqFOZiu6rHV1fAEK0flVpaIN2HB6kxHVSy/dIajWbThS7qON3W9/SNOH2JWkVCyulgGYekMePuw==", + "cpu": [ + "x64" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@next/swc-linux-x64-musl": { + "version": "15.2.4", + "resolved": "https://registry.npmjs.org/@next/swc-linux-x64-musl/-/swc-linux-x64-musl-15.2.4.tgz", + "integrity": "sha512-Dcdv/ix6srhkM25fgXiyOieFUkz+fOYkHlydWCtB0xMST6X9XYI3yPDKBZt1xuhOytONsIFJFB08xXYsxUwJLw==", + "cpu": [ + "x64" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@next/swc-win32-arm64-msvc": { + "version": "15.2.4", + "resolved": 
"https://registry.npmjs.org/@next/swc-win32-arm64-msvc/-/swc-win32-arm64-msvc-15.2.4.tgz", + "integrity": "sha512-dW0i7eukvDxtIhCYkMrZNQfNicPDExt2jPb9AZPpL7cfyUo7QSNl1DjsHjmmKp6qNAqUESyT8YFl/Aw91cNJJg==", + "cpu": [ + "arm64" + ], + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@next/swc-win32-x64-msvc": { + "version": "15.2.4", + "resolved": "https://registry.npmjs.org/@next/swc-win32-x64-msvc/-/swc-win32-x64-msvc-15.2.4.tgz", + "integrity": "sha512-SbnWkJmkS7Xl3kre8SdMF6F/XDh1DTFEhp0jRTj/uB8iPKoU2bb2NDfcu+iifv1+mxQEd1g2vvSxcZbXSKyWiQ==", + "cpu": [ + "x64" + ], + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@swc/counter": { + "version": "0.1.3", + "resolved": "https://registry.npmjs.org/@swc/counter/-/counter-0.1.3.tgz", + "integrity": "sha512-e2BR4lsJkkRlKZ/qCHPw9ZaSxc0MVUd7gtbtaB7aMvHeJVYe8sOB8DBZkP2DtISHGSku9sCK6T6cnY0CtXrOCQ==", + "license": "Apache-2.0" + }, + "node_modules/@swc/helpers": { + "version": "0.5.15", + "resolved": "https://registry.npmjs.org/@swc/helpers/-/helpers-0.5.15.tgz", + "integrity": "sha512-JQ5TuMi45Owi4/BIMAJBoSQoOJu12oOk/gADqlcUL9JEdHB8vyjUSsxqeNXnmXHjYKMi2WcYtezGEEhqUI/E2g==", + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.8.0" + } + }, + "node_modules/@tailwindcss/node": { + "version": "4.0.17", + "resolved": "https://registry.npmjs.org/@tailwindcss/node/-/node-4.0.17.tgz", + "integrity": "sha512-LIdNwcqyY7578VpofXyqjH6f+3fP4nrz7FBLki5HpzqjYfXdF2m/eW18ZfoKePtDGg90Bvvfpov9d2gy5XVCbg==", + "dev": true, + "license": "MIT", + "dependencies": { + "enhanced-resolve": "^5.18.1", + "jiti": "^2.4.2", + "tailwindcss": "4.0.17" + } + }, + "node_modules/@tailwindcss/oxide": { + "version": "4.0.17", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide/-/oxide-4.0.17.tgz", + "integrity": 
"sha512-B4OaUIRD2uVrULpAD1Yksx2+wNarQr2rQh65nXqaqbLY1jCd8fO+3KLh/+TH4Hzh2NTHQvgxVbPdUDOtLk7vAw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 10" + }, + "optionalDependencies": { + "@tailwindcss/oxide-android-arm64": "4.0.17", + "@tailwindcss/oxide-darwin-arm64": "4.0.17", + "@tailwindcss/oxide-darwin-x64": "4.0.17", + "@tailwindcss/oxide-freebsd-x64": "4.0.17", + "@tailwindcss/oxide-linux-arm-gnueabihf": "4.0.17", + "@tailwindcss/oxide-linux-arm64-gnu": "4.0.17", + "@tailwindcss/oxide-linux-arm64-musl": "4.0.17", + "@tailwindcss/oxide-linux-x64-gnu": "4.0.17", + "@tailwindcss/oxide-linux-x64-musl": "4.0.17", + "@tailwindcss/oxide-win32-arm64-msvc": "4.0.17", + "@tailwindcss/oxide-win32-x64-msvc": "4.0.17" + } + }, + "node_modules/@tailwindcss/oxide-android-arm64": { + "version": "4.0.17", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-android-arm64/-/oxide-android-arm64-4.0.17.tgz", + "integrity": "sha512-3RfO0ZK64WAhop+EbHeyxGThyDr/fYhxPzDbEQjD2+v7ZhKTb2svTWy+KK+J1PHATus2/CQGAGp7pHY/8M8ugg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@tailwindcss/oxide-darwin-arm64": { + "version": "4.0.17", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-darwin-arm64/-/oxide-darwin-arm64-4.0.17.tgz", + "integrity": "sha512-e1uayxFQCCDuzTk9s8q7MC5jFN42IY7nzcr5n0Mw/AcUHwD6JaBkXnATkD924ZsHyPDvddnusIEvkgLd2CiREg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@tailwindcss/oxide-darwin-x64": { + "version": "4.0.17", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-darwin-x64/-/oxide-darwin-x64-4.0.17.tgz", + "integrity": "sha512-d6z7HSdOKfXQ0HPlVx1jduUf/YtBuCCtEDIEFeBCzgRRtDsUuRtofPqxIVaSCUTOk5+OfRLonje6n9dF6AH8wQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + 
"optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@tailwindcss/oxide-freebsd-x64": { + "version": "4.0.17", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-freebsd-x64/-/oxide-freebsd-x64-4.0.17.tgz", + "integrity": "sha512-EjrVa6lx3wzXz3l5MsdOGtYIsRjgs5Mru6lDv4RuiXpguWeOb3UzGJ7vw7PEzcFadKNvNslEQqoAABeMezprxQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@tailwindcss/oxide-linux-arm-gnueabihf": { + "version": "4.0.17", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-linux-arm-gnueabihf/-/oxide-linux-arm-gnueabihf-4.0.17.tgz", + "integrity": "sha512-65zXfCOdi8wuaY0Ye6qMR5LAXokHYtrGvo9t/NmxvSZtCCitXV/gzJ/WP5ksXPhff1SV5rov0S+ZIZU+/4eyCQ==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@tailwindcss/oxide-linux-arm64-gnu": { + "version": "4.0.17", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-linux-arm64-gnu/-/oxide-linux-arm64-gnu-4.0.17.tgz", + "integrity": "sha512-+aaq6hJ8ioTdbJV5IA1WjWgLmun4T7eYLTvJIToiXLHy5JzUERRbIZjAcjgK9qXMwnvuu7rqpxzej+hGoEcG5g==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@tailwindcss/oxide-linux-arm64-musl": { + "version": "4.0.17", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-linux-arm64-musl/-/oxide-linux-arm64-musl-4.0.17.tgz", + "integrity": "sha512-/FhWgZCdUGAeYHYnZKekiOC0aXFiBIoNCA0bwzkICiMYS5Rtx2KxFfMUXQVnl4uZRblG5ypt5vpPhVaXgGk80w==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@tailwindcss/oxide-linux-x64-gnu": { + "version": "4.0.17", + "resolved": 
"https://registry.npmjs.org/@tailwindcss/oxide-linux-x64-gnu/-/oxide-linux-x64-gnu-4.0.17.tgz", + "integrity": "sha512-gELJzOHK6GDoIpm/539Golvk+QWZjxQcbkKq9eB2kzNkOvrP0xc5UPgO9bIMNt1M48mO8ZeNenCMGt6tfkvVBg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@tailwindcss/oxide-linux-x64-musl": { + "version": "4.0.17", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-linux-x64-musl/-/oxide-linux-x64-musl-4.0.17.tgz", + "integrity": "sha512-68NwxcJrZn94IOW4TysMIbYv5AlM6So1luTlbYUDIGnKma1yTFGBRNEJ+SacJ3PZE2rgcTBNRHX1TB4EQ/XEHw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@tailwindcss/oxide-win32-arm64-msvc": { + "version": "4.0.17", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-win32-arm64-msvc/-/oxide-win32-arm64-msvc-4.0.17.tgz", + "integrity": "sha512-AkBO8efP2/7wkEXkNlXzRD4f/7WerqKHlc6PWb5v0jGbbm22DFBLbIM19IJQ3b+tNewQZa+WnPOaGm0SmwMNjw==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@tailwindcss/oxide-win32-x64-msvc": { + "version": "4.0.17", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-win32-x64-msvc/-/oxide-win32-x64-msvc-4.0.17.tgz", + "integrity": "sha512-7/DTEvXcoWlqX0dAlcN0zlmcEu9xSermuo7VNGX9tJ3nYMdo735SHvbrHDln1+LYfF6NhJ3hjbpbjkMOAGmkDg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@tailwindcss/postcss": { + "version": "4.0.17", + "resolved": "https://registry.npmjs.org/@tailwindcss/postcss/-/postcss-4.0.17.tgz", + "integrity": "sha512-qeJbRTB5FMZXmuJF+eePd235EGY6IyJZF0Bh0YM6uMcCI4L9Z7dy+lPuLAhxOJzxnajsbjPoDAKOuAqZRtf1PQ==", + "dev": true, + 
"license": "MIT", + "dependencies": { + "@alloc/quick-lru": "^5.2.0", + "@tailwindcss/node": "4.0.17", + "@tailwindcss/oxide": "4.0.17", + "lightningcss": "1.29.2", + "postcss": "^8.4.41", + "tailwindcss": "4.0.17" + } + }, + "node_modules/@types/d3-array": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/@types/d3-array/-/d3-array-3.2.1.tgz", + "integrity": "sha512-Y2Jn2idRrLzUfAKV2LyRImR+y4oa2AntrgID95SHJxuMUrkNXmanDSed71sRNZysveJVt1hLLemQZIady0FpEg==", + "license": "MIT" + }, + "node_modules/@types/d3-color": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/@types/d3-color/-/d3-color-3.1.3.tgz", + "integrity": "sha512-iO90scth9WAbmgv7ogoq57O9YpKmFBbmoEoCHDB2xMBY0+/KVrqAaCDyCE16dUspeOvIxFFRI+0sEtqDqy2b4A==", + "license": "MIT" + }, + "node_modules/@types/d3-ease": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/@types/d3-ease/-/d3-ease-3.0.2.tgz", + "integrity": "sha512-NcV1JjO5oDzoK26oMzbILE6HW7uVXOHLQvHshBUW4UMdZGfiY6v5BeQwh9a9tCzv+CeefZQHJt5SRgK154RtiA==", + "license": "MIT" + }, + "node_modules/@types/d3-interpolate": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/@types/d3-interpolate/-/d3-interpolate-3.0.4.tgz", + "integrity": "sha512-mgLPETlrpVV1YRJIglr4Ez47g7Yxjl1lj7YKsiMCb27VJH9W8NVM6Bb9d8kkpG/uAQS5AmbA48q2IAolKKo1MA==", + "license": "MIT", + "dependencies": { + "@types/d3-color": "*" + } + }, + "node_modules/@types/d3-path": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/@types/d3-path/-/d3-path-3.1.1.tgz", + "integrity": "sha512-VMZBYyQvbGmWyWVea0EHs/BwLgxc+MKi1zLDCONksozI4YJMcTt8ZEuIR4Sb1MMTE8MMW49v0IwI5+b7RmfWlg==", + "license": "MIT" + }, + "node_modules/@types/d3-scale": { + "version": "4.0.9", + "resolved": "https://registry.npmjs.org/@types/d3-scale/-/d3-scale-4.0.9.tgz", + "integrity": "sha512-dLmtwB8zkAeO/juAMfnV+sItKjlsw2lKdZVVy6LRr0cBmegxSABiLEpGVmSJJ8O08i4+sGR6qQtb6WtuwJdvVw==", + "license": "MIT", + "dependencies": { + "@types/d3-time": "*" + } + }, + 
"node_modules/@types/d3-shape": { + "version": "3.1.7", + "resolved": "https://registry.npmjs.org/@types/d3-shape/-/d3-shape-3.1.7.tgz", + "integrity": "sha512-VLvUQ33C+3J+8p+Daf+nYSOsjB4GXp19/S/aGo60m9h1v6XaxjiT82lKVWJCfzhtuZ3yD7i/TPeC/fuKLLOSmg==", + "license": "MIT", + "dependencies": { + "@types/d3-path": "*" + } + }, + "node_modules/@types/d3-time": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/@types/d3-time/-/d3-time-3.0.4.tgz", + "integrity": "sha512-yuzZug1nkAAaBlBBikKZTgzCeA+k1uy4ZFwWANOfKw5z5LRhV0gNA7gNkKm7HoK+HRN0wX3EkxGk0fpbWhmB7g==", + "license": "MIT" + }, + "node_modules/@types/d3-timer": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/@types/d3-timer/-/d3-timer-3.0.2.tgz", + "integrity": "sha512-Ps3T8E8dZDam6fUyNiMkekK3XUsaUEik+idO9/YjPtfj2qruF8tFBXS7XhtE4iIXBLxhmLjP3SXpLhVf21I9Lw==", + "license": "MIT" + }, + "node_modules/@types/hoist-non-react-statics": { + "version": "3.3.6", + "resolved": "https://registry.npmjs.org/@types/hoist-non-react-statics/-/hoist-non-react-statics-3.3.6.tgz", + "integrity": "sha512-lPByRJUer/iN/xa4qpyL0qmL11DqNW81iU/IG1S3uvRUq4oKagz8VCxZjiWkumgt66YT3vOdDgZ0o32sGKtCEw==", + "license": "MIT", + "dependencies": { + "@types/react": "*", + "hoist-non-react-statics": "^3.3.0" + } + }, + "node_modules/@types/node": { + "version": "20.17.28", + "resolved": "https://registry.npmjs.org/@types/node/-/node-20.17.28.tgz", + "integrity": "sha512-DHlH/fNL6Mho38jTy7/JT7sn2wnXI+wULR6PV4gy4VHLVvnrV/d3pHAMQHhc4gjdLmK2ZiPoMxzp6B3yRajLSQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "undici-types": "~6.19.2" + } + }, + "node_modules/@types/react": { + "version": "19.0.12", + "resolved": "https://registry.npmjs.org/@types/react/-/react-19.0.12.tgz", + "integrity": "sha512-V6Ar115dBDrjbtXSrS+/Oruobc+qVbbUxDFC1RSbRqLt5SYvxxyIDrSC85RWml54g+jfNeEMZhEj7wW07ONQhA==", + "license": "MIT", + "dependencies": { + "csstype": "^3.0.2" + } + }, + "node_modules/@types/react-dom": { + "version": 
"19.0.4", + "resolved": "https://registry.npmjs.org/@types/react-dom/-/react-dom-19.0.4.tgz", + "integrity": "sha512-4fSQ8vWFkg+TGhePfUzVmat3eC14TXYSsiiDSLI0dVLsrm9gZFABjPy/Qu6TKgl1tq1Bu1yDsuQgY3A3DOjCcg==", + "dev": true, + "license": "MIT", + "peerDependencies": { + "@types/react": "^19.0.0" + } + }, + "node_modules/@types/react-grid-layout": { + "version": "1.3.5", + "resolved": "https://registry.npmjs.org/@types/react-grid-layout/-/react-grid-layout-1.3.5.tgz", + "integrity": "sha512-WH/po1gcEcoR6y857yAnPGug+ZhkF4PaTUxgAbwfeSH/QOgVSakKHBXoPGad/sEznmkiaK3pqHk+etdWisoeBQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/react": "*" + } + }, + "node_modules/asynckit": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", + "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==", + "license": "MIT" + }, + "node_modules/axios": { + "version": "1.8.4", + "resolved": "https://registry.npmjs.org/axios/-/axios-1.8.4.tgz", + "integrity": "sha512-eBSYY4Y68NNlHbHBMdeDmKNtDgXWhQsJcGqzO3iLUM0GraQFSS9cVgPX5I9b3lbdFKyYoAEGAZF1DwhTaljNAw==", + "license": "MIT", + "dependencies": { + "follow-redirects": "^1.15.6", + "form-data": "^4.0.0", + "proxy-from-env": "^1.1.0" + } + }, + "node_modules/busboy": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/busboy/-/busboy-1.6.0.tgz", + "integrity": "sha512-8SFQbg/0hQ9xy3UNTB0YEnsNBbWfhf7RtnzpL7TkBiTBRfrQ9Fxcnz7VJsleJpyp6rVLvXiuORqjlHi5q+PYuA==", + "dependencies": { + "streamsearch": "^1.1.0" + }, + "engines": { + "node": ">=10.16.0" + } + }, + "node_modules/call-bind-apply-helpers": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/call-bind-apply-helpers/-/call-bind-apply-helpers-1.0.2.tgz", + "integrity": "sha512-Sp1ablJ0ivDkSzjcaJdxEunN5/XvksFJ2sMBFfq6x0ryhQV/2b/KwFe21cMpmHtPOSij8K99/wSfoEuTObmuMQ==", + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0", + "function-bind": 
"^1.1.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/caniuse-lite": { + "version": "1.0.30001707", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001707.tgz", + "integrity": "sha512-3qtRjw/HQSMlDWf+X79N206fepf4SOOU6SQLMaq/0KkZLmSjPxAkBOQQ+FxbHKfHmYLZFfdWsO3KA90ceHPSnw==", + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/caniuse-lite" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "CC-BY-4.0" + }, + "node_modules/chart.js": { + "version": "4.4.8", + "resolved": "https://registry.npmjs.org/chart.js/-/chart.js-4.4.8.tgz", + "integrity": "sha512-IkGZlVpXP+83QpMm4uxEiGqSI7jFizwVtF3+n5Pc3k7sMO+tkd0qxh2OzLhenM0K80xtmAONWGBn082EiBQSDA==", + "license": "MIT", + "dependencies": { + "@kurkle/color": "^0.3.0" + }, + "engines": { + "pnpm": ">=8" + } + }, + "node_modules/class-variance-authority": { + "version": "0.7.1", + "resolved": "https://registry.npmjs.org/class-variance-authority/-/class-variance-authority-0.7.1.tgz", + "integrity": "sha512-Ka+9Trutv7G8M6WT6SeiRWz792K5qEqIGEGzXKhAE6xOWAY6pPH8U+9IY3oCMv6kqTmLsv7Xh/2w2RigkePMsg==", + "license": "Apache-2.0", + "dependencies": { + "clsx": "^2.1.1" + }, + "funding": { + "url": "https://polar.sh/cva" + } + }, + "node_modules/classnames": { + "version": "2.5.1", + "resolved": "https://registry.npmjs.org/classnames/-/classnames-2.5.1.tgz", + "integrity": "sha512-saHYOzhIQs6wy2sVxTM6bUDsQO4F50V9RQ22qBpEdCW+I+/Wmke2HOl6lS6dTpdxVhb88/I6+Hs+438c3lfUow==", + "license": "MIT" + }, + "node_modules/client-only": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/client-only/-/client-only-0.0.1.tgz", + "integrity": "sha512-IV3Ou0jSMzZrd3pZ48nLkT9DA7Ag1pnPzaiQhpW7c3RbcqqzvzzVu+L8gfqMp/8IM2MQtSiqaCxrrcfu8I8rMA==", + "license": "MIT" + }, + "node_modules/clsx": { + "version": "2.1.1", + "resolved": 
"https://registry.npmjs.org/clsx/-/clsx-2.1.1.tgz", + "integrity": "sha512-eYm0QWBtUrBWZWG0d386OGAw16Z995PiOVo2B7bjWSbHedGl5e0ZWaq65kOGgUSNesEIDkB9ISbTg/JK9dhCZA==", + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/color": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/color/-/color-4.2.3.tgz", + "integrity": "sha512-1rXeuUUiGGrykh+CeBdu5Ie7OJwinCgQY0bc7GCRxy5xVHy+moaqkpL/jqQq0MtQOeYcrqEz4abc5f0KtU7W4A==", + "license": "MIT", + "optional": true, + "dependencies": { + "color-convert": "^2.0.1", + "color-string": "^1.9.0" + }, + "engines": { + "node": ">=12.5.0" + } + }, + "node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "license": "MIT", + "optional": true, + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "license": "MIT", + "optional": true + }, + "node_modules/color-string": { + "version": "1.9.1", + "resolved": "https://registry.npmjs.org/color-string/-/color-string-1.9.1.tgz", + "integrity": "sha512-shrVawQFojnZv6xM40anx4CkoDP+fZsw/ZerEMsW/pyzsRbElpsL/DBVW7q3ExxwusdNXI3lXpuhEZkzs8p5Eg==", + "license": "MIT", + "optional": true, + "dependencies": { + "color-name": "^1.0.0", + "simple-swizzle": "^0.2.2" + } + }, + "node_modules/combined-stream": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", + "integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==", + "license": "MIT", + "dependencies": { + "delayed-stream": 
"~1.0.0" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/cookie": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/cookie/-/cookie-1.0.2.tgz", + "integrity": "sha512-9Kr/j4O16ISv8zBBhJoi4bXOYNTkFLOqSL3UDB0njXxCXNezjeyVrJyGOWtgfs/q2km1gwBcfH8q1yEGoMYunA==", + "license": "MIT", + "engines": { + "node": ">=18" + } + }, + "node_modules/csstype": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/csstype/-/csstype-3.1.3.tgz", + "integrity": "sha512-M1uQkMl8rQK/szD0LNhtqxIPLpimGm8sOBwU7lLnCpSbTyY3yeU1Vc7l4KT5zT4s/yOxHH5O7tIuuLOCnLADRw==", + "license": "MIT" + }, + "node_modules/d3-array": { + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/d3-array/-/d3-array-3.2.4.tgz", + "integrity": "sha512-tdQAmyA18i4J7wprpYq8ClcxZy3SC31QMeByyCFyRt7BVHdREQZ5lpzoe5mFEYZUWe+oq8HBvk9JjpibyEV4Jg==", + "license": "ISC", + "dependencies": { + "internmap": "1 - 2" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-color": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/d3-color/-/d3-color-3.1.0.tgz", + "integrity": "sha512-zg/chbXyeBtMQ1LbD/WSoW2DpC3I0mpmPdW+ynRTj/x2DAWYrIY7qeZIHidozwV24m4iavr15lNwIwLxRmOxhA==", + "license": "ISC", + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-ease": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/d3-ease/-/d3-ease-3.0.1.tgz", + "integrity": "sha512-wR/XK3D3XcLIZwpbvQwQ5fK+8Ykds1ip7A2Txe0yxncXSdq1L9skcG7blcedkOX+ZcgxGAmLX1FrRGbADwzi0w==", + "license": "BSD-3-Clause", + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-format": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/d3-format/-/d3-format-3.1.0.tgz", + "integrity": "sha512-YyUI6AEuY/Wpt8KWLgZHsIU86atmikuoOmCfommt0LYHiQSPjvX2AcFc38PX0CBpr2RCyZhjex+NS/LPOv6YqA==", + "license": "ISC", + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-interpolate": { + "version": "3.0.1", + "resolved": 
"https://registry.npmjs.org/d3-interpolate/-/d3-interpolate-3.0.1.tgz", + "integrity": "sha512-3bYs1rOD33uo8aqJfKP3JWPAibgw8Zm2+L9vBKEHJ2Rg+viTR7o5Mmv5mZcieN+FRYaAOWX5SJATX6k1PWz72g==", + "license": "ISC", + "dependencies": { + "d3-color": "1 - 3" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-path": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/d3-path/-/d3-path-3.1.0.tgz", + "integrity": "sha512-p3KP5HCf/bvjBSSKuXid6Zqijx7wIfNW+J/maPs+iwR35at5JCbLUT0LzF1cnjbCHWhqzQTIN2Jpe8pRebIEFQ==", + "license": "ISC", + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-scale": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/d3-scale/-/d3-scale-4.0.2.tgz", + "integrity": "sha512-GZW464g1SH7ag3Y7hXjf8RoUuAFIqklOAq3MRl4OaWabTFJY9PN/E1YklhXLh+OQ3fM9yS2nOkCoS+WLZ6kvxQ==", + "license": "ISC", + "dependencies": { + "d3-array": "2.10.0 - 3", + "d3-format": "1 - 3", + "d3-interpolate": "1.2.0 - 3", + "d3-time": "2.1.1 - 3", + "d3-time-format": "2 - 4" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-shape": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/d3-shape/-/d3-shape-3.2.0.tgz", + "integrity": "sha512-SaLBuwGm3MOViRq2ABk3eLoxwZELpH6zhl3FbAoJ7Vm1gofKx6El1Ib5z23NUEhF9AsGl7y+dzLe5Cw2AArGTA==", + "license": "ISC", + "dependencies": { + "d3-path": "^3.1.0" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-time": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/d3-time/-/d3-time-3.1.0.tgz", + "integrity": "sha512-VqKjzBLejbSMT4IgbmVgDjpkYrNWUYJnbCGo874u7MMKIWsILRX+OpX/gTk8MqjpT1A/c6HY2dCA77ZN0lkQ2Q==", + "license": "ISC", + "dependencies": { + "d3-array": "2 - 3" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-time-format": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/d3-time-format/-/d3-time-format-4.1.0.tgz", + "integrity": 
"sha512-dJxPBlzC7NugB2PDLwo9Q8JiTR3M3e4/XANkreKSUxF8vvXKqm1Yfq4Q5dl8budlunRVlUUaDUgFt7eA8D6NLg==", + "license": "ISC", + "dependencies": { + "d3-time": "1 - 3" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-timer": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/d3-timer/-/d3-timer-3.0.1.tgz", + "integrity": "sha512-ndfJ/JxxMd3nw31uyKoY2naivF+r29V+Lc0svZxe1JvvIRmi8hUsrMvdOwgS1o6uBHmiz91geQ0ylPP0aj1VUA==", + "license": "ISC", + "engines": { + "node": ">=12" + } + }, + "node_modules/decimal.js-light": { + "version": "2.5.1", + "resolved": "https://registry.npmjs.org/decimal.js-light/-/decimal.js-light-2.5.1.tgz", + "integrity": "sha512-qIMFpTMZmny+MMIitAB6D7iVPEorVw6YQRWkvarTkT4tBeSLLiHzcwj6q0MmYSFCiVpiqPJTJEYIrpcPzVEIvg==", + "license": "MIT" + }, + "node_modules/delayed-stream": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", + "integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==", + "license": "MIT", + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/detect-libc": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-2.0.3.tgz", + "integrity": "sha512-bwy0MGW55bG41VqxxypOsdSdGqLwXPI/focwgTYCFMbdUiBAxLg9CFzG08sz2aqzknwiX7Hkl0bQENjg8iLByw==", + "devOptional": true, + "license": "Apache-2.0", + "engines": { + "node": ">=8" + } + }, + "node_modules/dom-helpers": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/dom-helpers/-/dom-helpers-5.2.1.tgz", + "integrity": "sha512-nRCa7CK3VTrM2NmGkIy4cbK7IZlgBE/PYMn55rrXefr5xXDP0LdtfPnblFDoVdcAfslJ7or6iqAUnx0CCGIWQA==", + "license": "MIT", + "dependencies": { + "@babel/runtime": "^7.8.7", + "csstype": "^3.0.2" + } + }, + "node_modules/dunder-proto": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/dunder-proto/-/dunder-proto-1.0.1.tgz", + "integrity": 
"sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A==", + "license": "MIT", + "dependencies": { + "call-bind-apply-helpers": "^1.0.1", + "es-errors": "^1.3.0", + "gopd": "^1.2.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/enhanced-resolve": { + "version": "5.18.1", + "resolved": "https://registry.npmjs.org/enhanced-resolve/-/enhanced-resolve-5.18.1.tgz", + "integrity": "sha512-ZSW3ma5GkcQBIpwZTSRAI8N71Uuwgs93IezB7mf7R60tC8ZbJideoDNKjHn2O9KIlx6rkGTTEk1xUCK2E1Y2Yg==", + "dev": true, + "license": "MIT", + "dependencies": { + "graceful-fs": "^4.2.4", + "tapable": "^2.2.0" + }, + "engines": { + "node": ">=10.13.0" + } + }, + "node_modules/es-define-property": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.1.tgz", + "integrity": "sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-errors": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/es-errors/-/es-errors-1.3.0.tgz", + "integrity": "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-object-atoms": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/es-object-atoms/-/es-object-atoms-1.1.1.tgz", + "integrity": "sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA==", + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-set-tostringtag": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/es-set-tostringtag/-/es-set-tostringtag-2.1.0.tgz", + "integrity": "sha512-j6vWzfrGVfyXxge+O0x5sh6cvxAog0a/4Rdd2K36zCMV5eJ+/+tOAngRO8cODMNWbVRdVlmGZQL2YS3yR8bIUA==", + "license": "MIT", + 
"dependencies": { + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.6", + "has-tostringtag": "^1.0.2", + "hasown": "^2.0.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/eventemitter3": { + "version": "4.0.7", + "resolved": "https://registry.npmjs.org/eventemitter3/-/eventemitter3-4.0.7.tgz", + "integrity": "sha512-8guHBZCwKnFhYdHr2ysuRWErTwhoN2X8XELRlrRwpmfeY2jjuUN4taQMsULKUVo1K4DvZl+0pgfyoysHxvmvEw==", + "license": "MIT" + }, + "node_modules/fast-equals": { + "version": "5.2.2", + "resolved": "https://registry.npmjs.org/fast-equals/-/fast-equals-5.2.2.tgz", + "integrity": "sha512-V7/RktU11J3I36Nwq2JnZEM7tNm17eBJz+u25qdxBZeCKiX6BkVSZQjwWIr+IobgnZy+ag73tTZgZi7tr0LrBw==", + "license": "MIT", + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/follow-redirects": { + "version": "1.15.9", + "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.9.tgz", + "integrity": "sha512-gew4GsXizNgdoRyqmyfMHyAmXsZDk6mHkSxZFCzW9gwlbtOW44CDtYavM+y+72qD/Vq2l550kMF52DT8fOLJqQ==", + "funding": [ + { + "type": "individual", + "url": "https://github.com/sponsors/RubenVerborgh" + } + ], + "license": "MIT", + "engines": { + "node": ">=4.0" + }, + "peerDependenciesMeta": { + "debug": { + "optional": true + } + } + }, + "node_modules/form-data": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.2.tgz", + "integrity": "sha512-hGfm/slu0ZabnNt4oaRZ6uREyfCj6P4fT/n6A1rGV+Z0VdGXjfOhVUpkn6qVQONHGIFwmveGXyDs75+nr6FM8w==", + "license": "MIT", + "dependencies": { + "asynckit": "^0.4.0", + "combined-stream": "^1.0.8", + "es-set-tostringtag": "^2.1.0", + "mime-types": "^2.1.12" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/framer-motion": { + "version": "12.6.3", + "resolved": "https://registry.npmjs.org/framer-motion/-/framer-motion-12.6.3.tgz", + "integrity": "sha512-2hsqknz23aloK85bzMc9nSR2/JP+fValQ459ZTVElFQ0xgwR2YqNjYSuDZdFBPOwVCt4Q9jgyTt6hg6sVOALzw==", + "license": "MIT", + "dependencies": 
{ + "motion-dom": "^12.6.3", + "motion-utils": "^12.6.3", + "tslib": "^2.4.0" + }, + "peerDependencies": { + "@emotion/is-prop-valid": "*", + "react": "^18.0.0 || ^19.0.0", + "react-dom": "^18.0.0 || ^19.0.0" + }, + "peerDependenciesMeta": { + "@emotion/is-prop-valid": { + "optional": true + }, + "react": { + "optional": true + }, + "react-dom": { + "optional": true + } + } + }, + "node_modules/function-bind": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz", + "integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==", + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/get-intrinsic": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.3.0.tgz", + "integrity": "sha512-9fSjSaos/fRIVIp+xSJlE6lfwhES7LNtKaCBIamHsjr2na1BiABJPo0mOjjz8GJDURarmCPGqaiVg5mfjb98CQ==", + "license": "MIT", + "dependencies": { + "call-bind-apply-helpers": "^1.0.2", + "es-define-property": "^1.0.1", + "es-errors": "^1.3.0", + "es-object-atoms": "^1.1.1", + "function-bind": "^1.1.2", + "get-proto": "^1.0.1", + "gopd": "^1.2.0", + "has-symbols": "^1.1.0", + "hasown": "^2.0.2", + "math-intrinsics": "^1.1.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/get-proto": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/get-proto/-/get-proto-1.0.1.tgz", + "integrity": "sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g==", + "license": "MIT", + "dependencies": { + "dunder-proto": "^1.0.1", + "es-object-atoms": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/gopd": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/gopd/-/gopd-1.2.0.tgz", + "integrity": 
"sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/graceful-fs": { + "version": "4.2.11", + "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz", + "integrity": "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==", + "dev": true, + "license": "ISC" + }, + "node_modules/has-symbols": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.1.0.tgz", + "integrity": "sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/has-tostringtag": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.2.tgz", + "integrity": "sha512-NqADB8VjPFLM2V0VvHUewwwsw0ZWBaIdgo+ieHtK3hasLz4qeCRjYcqfB6AQrBggRKppKF8L52/VqdVsO47Dlw==", + "license": "MIT", + "dependencies": { + "has-symbols": "^1.0.3" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/hasown": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz", + "integrity": "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==", + "license": "MIT", + "dependencies": { + "function-bind": "^1.1.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/hoist-non-react-statics": { + "version": "3.3.2", + "resolved": "https://registry.npmjs.org/hoist-non-react-statics/-/hoist-non-react-statics-3.3.2.tgz", + "integrity": "sha512-/gGivxi8JPKWNm/W0jSmzcMPpfpPLc3dY/6GxhX2hQ9iGj3aDfklV4ET7NjKpSinLpJ5vafa9iiGIEZg10SfBw==", + "license": "BSD-3-Clause", + 
"dependencies": { + "react-is": "^16.7.0" + } + }, + "node_modules/hoist-non-react-statics/node_modules/react-is": { + "version": "16.13.1", + "resolved": "https://registry.npmjs.org/react-is/-/react-is-16.13.1.tgz", + "integrity": "sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ==", + "license": "MIT" + }, + "node_modules/internmap": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/internmap/-/internmap-2.0.3.tgz", + "integrity": "sha512-5Hh7Y1wQbvY5ooGgPbDaL5iYLAPzMTUrjMulskHLH6wnv/A+1q5rgEaiuqEjB+oxGXIVZs1FF+R/KPN3ZSQYYg==", + "license": "ISC", + "engines": { + "node": ">=12" + } + }, + "node_modules/is-arrayish": { + "version": "0.3.2", + "resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.3.2.tgz", + "integrity": "sha512-eVRqCvVlZbuw3GrM63ovNSNAeA1K16kaR/LRY/92w0zxQ5/1YzwblUX652i4Xs9RwAGjW9d9y6X88t8OaAJfWQ==", + "license": "MIT", + "optional": true + }, + "node_modules/jiti": { + "version": "2.4.2", + "resolved": "https://registry.npmjs.org/jiti/-/jiti-2.4.2.tgz", + "integrity": "sha512-rg9zJN+G4n2nfJl5MW3BMygZX56zKPNVEYYqq7adpmMh4Jn2QNEwhvQlFy6jPVdcod7txZtKHWnyZiA3a0zP7A==", + "dev": true, + "license": "MIT", + "bin": { + "jiti": "lib/jiti-cli.mjs" + } + }, + "node_modules/js-tokens": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", + "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==", + "license": "MIT" + }, + "node_modules/lightningcss": { + "version": "1.29.2", + "resolved": "https://registry.npmjs.org/lightningcss/-/lightningcss-1.29.2.tgz", + "integrity": "sha512-6b6gd/RUXKaw5keVdSEtqFVdzWnU5jMxTUjA2bVcMNPLwSQ08Sv/UodBVtETLCn7k4S1Ibxwh7k68IwLZPgKaA==", + "dev": true, + "license": "MPL-2.0", + "dependencies": { + "detect-libc": "^2.0.3" + }, + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" 
+ }, + "optionalDependencies": { + "lightningcss-darwin-arm64": "1.29.2", + "lightningcss-darwin-x64": "1.29.2", + "lightningcss-freebsd-x64": "1.29.2", + "lightningcss-linux-arm-gnueabihf": "1.29.2", + "lightningcss-linux-arm64-gnu": "1.29.2", + "lightningcss-linux-arm64-musl": "1.29.2", + "lightningcss-linux-x64-gnu": "1.29.2", + "lightningcss-linux-x64-musl": "1.29.2", + "lightningcss-win32-arm64-msvc": "1.29.2", + "lightningcss-win32-x64-msvc": "1.29.2" + } + }, + "node_modules/lightningcss-darwin-arm64": { + "version": "1.29.2", + "resolved": "https://registry.npmjs.org/lightningcss-darwin-arm64/-/lightningcss-darwin-arm64-1.29.2.tgz", + "integrity": "sha512-cK/eMabSViKn/PG8U/a7aCorpeKLMlK0bQeNHmdb7qUnBkNPnL+oV5DjJUo0kqWsJUapZsM4jCfYItbqBDvlcA==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MPL-2.0", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/lightningcss-darwin-x64": { + "version": "1.29.2", + "resolved": "https://registry.npmjs.org/lightningcss-darwin-x64/-/lightningcss-darwin-x64-1.29.2.tgz", + "integrity": "sha512-j5qYxamyQw4kDXX5hnnCKMf3mLlHvG44f24Qyi2965/Ycz829MYqjrVg2H8BidybHBp9kom4D7DR5VqCKDXS0w==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MPL-2.0", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/lightningcss-freebsd-x64": { + "version": "1.29.2", + "resolved": "https://registry.npmjs.org/lightningcss-freebsd-x64/-/lightningcss-freebsd-x64-1.29.2.tgz", + "integrity": "sha512-wDk7M2tM78Ii8ek9YjnY8MjV5f5JN2qNVO+/0BAGZRvXKtQrBC4/cn4ssQIpKIPP44YXw6gFdpUF+Ps+RGsCwg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MPL-2.0", + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">= 12.0.0" + }, + 
"funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/lightningcss-linux-arm-gnueabihf": { + "version": "1.29.2", + "resolved": "https://registry.npmjs.org/lightningcss-linux-arm-gnueabihf/-/lightningcss-linux-arm-gnueabihf-1.29.2.tgz", + "integrity": "sha512-IRUrOrAF2Z+KExdExe3Rz7NSTuuJ2HvCGlMKoquK5pjvo2JY4Rybr+NrKnq0U0hZnx5AnGsuFHjGnNT14w26sg==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MPL-2.0", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/lightningcss-linux-arm64-gnu": { + "version": "1.29.2", + "resolved": "https://registry.npmjs.org/lightningcss-linux-arm64-gnu/-/lightningcss-linux-arm64-gnu-1.29.2.tgz", + "integrity": "sha512-KKCpOlmhdjvUTX/mBuaKemp0oeDIBBLFiU5Fnqxh1/DZ4JPZi4evEH7TKoSBFOSOV3J7iEmmBaw/8dpiUvRKlQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MPL-2.0", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/lightningcss-linux-arm64-musl": { + "version": "1.29.2", + "resolved": "https://registry.npmjs.org/lightningcss-linux-arm64-musl/-/lightningcss-linux-arm64-musl-1.29.2.tgz", + "integrity": "sha512-Q64eM1bPlOOUgxFmoPUefqzY1yV3ctFPE6d/Vt7WzLW4rKTv7MyYNky+FWxRpLkNASTnKQUaiMJ87zNODIrrKQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MPL-2.0", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/lightningcss-linux-x64-gnu": { + "version": "1.29.2", + "resolved": "https://registry.npmjs.org/lightningcss-linux-x64-gnu/-/lightningcss-linux-x64-gnu-1.29.2.tgz", + "integrity": 
"sha512-0v6idDCPG6epLXtBH/RPkHvYx74CVziHo6TMYga8O2EiQApnUPZsbR9nFNrg2cgBzk1AYqEd95TlrsL7nYABQg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MPL-2.0", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/lightningcss-linux-x64-musl": { + "version": "1.29.2", + "resolved": "https://registry.npmjs.org/lightningcss-linux-x64-musl/-/lightningcss-linux-x64-musl-1.29.2.tgz", + "integrity": "sha512-rMpz2yawkgGT8RULc5S4WiZopVMOFWjiItBT7aSfDX4NQav6M44rhn5hjtkKzB+wMTRlLLqxkeYEtQ3dd9696w==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MPL-2.0", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/lightningcss-win32-arm64-msvc": { + "version": "1.29.2", + "resolved": "https://registry.npmjs.org/lightningcss-win32-arm64-msvc/-/lightningcss-win32-arm64-msvc-1.29.2.tgz", + "integrity": "sha512-nL7zRW6evGQqYVu/bKGK+zShyz8OVzsCotFgc7judbt6wnB2KbiKKJwBE4SGoDBQ1O94RjW4asrCjQL4i8Fhbw==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MPL-2.0", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/lightningcss-win32-x64-msvc": { + "version": "1.29.2", + "resolved": "https://registry.npmjs.org/lightningcss-win32-x64-msvc/-/lightningcss-win32-x64-msvc-1.29.2.tgz", + "integrity": "sha512-EdIUW3B2vLuHmv7urfzMI/h2fmlnOQBk1xlsDxkN1tCWKjNFjfLhGxYk8C8mzpSfr+A6jFFIi8fU6LbQGsRWjA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MPL-2.0", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + 
"node_modules/lodash": { + "version": "4.17.21", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", + "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==", + "license": "MIT" + }, + "node_modules/loose-envify": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/loose-envify/-/loose-envify-1.4.0.tgz", + "integrity": "sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q==", + "license": "MIT", + "dependencies": { + "js-tokens": "^3.0.0 || ^4.0.0" + }, + "bin": { + "loose-envify": "cli.js" + } + }, + "node_modules/lucide-react": { + "version": "0.486.0", + "resolved": "https://registry.npmjs.org/lucide-react/-/lucide-react-0.486.0.tgz", + "integrity": "sha512-xWop/wMsC1ikiEVLZrxXjPKw4vU/eAip33G2mZHgbWnr4Nr5Rt4Vx4s/q1D3B/rQVbxjOuqASkEZcUxDEKzecw==", + "license": "ISC", + "peerDependencies": { + "react": "^16.5.1 || ^17.0.0 || ^18.0.0 || ^19.0.0" + } + }, + "node_modules/math-intrinsics": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/math-intrinsics/-/math-intrinsics-1.1.0.tgz", + "integrity": "sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/mime-db": { + "version": "1.52.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", + "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/mime-types": { + "version": "2.1.35", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", + "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", + "license": "MIT", + "dependencies": { + "mime-db": "1.52.0" + }, + "engines": { + "node": ">= 0.6" + } + }, + 
"node_modules/motion": { + "version": "12.6.3", + "resolved": "https://registry.npmjs.org/motion/-/motion-12.6.3.tgz", + "integrity": "sha512-zw/vqUgv5F5m9fkvOl/eCv2AF1+tkeZl3fu2uIlisIaip8sm5e0CouAl6GkdiRoF+G7s29CjqMdIyPMirwUGHA==", + "license": "MIT", + "dependencies": { + "framer-motion": "^12.6.3", + "tslib": "^2.4.0" + }, + "peerDependencies": { + "@emotion/is-prop-valid": "*", + "react": "^18.0.0 || ^19.0.0", + "react-dom": "^18.0.0 || ^19.0.0" + }, + "peerDependenciesMeta": { + "@emotion/is-prop-valid": { + "optional": true + }, + "react": { + "optional": true + }, + "react-dom": { + "optional": true + } + } + }, + "node_modules/motion-dom": { + "version": "12.6.3", + "resolved": "https://registry.npmjs.org/motion-dom/-/motion-dom-12.6.3.tgz", + "integrity": "sha512-gRY08RjcnzgFYLemUZ1lo/e9RkBxR+6d4BRvoeZDSeArG4XQXERSPapKl3LNQRu22Sndjf1h+iavgY0O4NrYqA==", + "license": "MIT", + "dependencies": { + "motion-utils": "^12.6.3" + } + }, + "node_modules/motion-utils": { + "version": "12.6.3", + "resolved": "https://registry.npmjs.org/motion-utils/-/motion-utils-12.6.3.tgz", + "integrity": "sha512-R/b3Ia2VxtTNZ4LTEO5pKYau1OUNHOuUfxuP0WFCTDYdHkeTBR9UtxR1cc8mDmKr8PEhmmfnTKGz3rSMjNRoRg==", + "license": "MIT" + }, + "node_modules/nanoid": { + "version": "3.3.11", + "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.11.tgz", + "integrity": "sha512-N8SpfPUnUp1bK+PMYW8qSWdl9U+wwNWI4QKxOYDy9JAro3WMX7p2OeVRF9v+347pnakNevPmiHhNmZ2HbFA76w==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "bin": { + "nanoid": "bin/nanoid.cjs" + }, + "engines": { + "node": "^10 || ^12 || ^13.7 || ^14 || >=15.0.1" + } + }, + "node_modules/next": { + "version": "15.2.4", + "resolved": "https://registry.npmjs.org/next/-/next-15.2.4.tgz", + "integrity": "sha512-VwL+LAaPSxEkd3lU2xWbgEOtrM8oedmyhBqaVNmgKB+GvZlCy9rgaEc+y2on0wv+l0oSFqLtYD6dcC1eAedUaQ==", + "license": "MIT", + "dependencies": { + "@next/env": "15.2.4", + 
"@swc/counter": "0.1.3", + "@swc/helpers": "0.5.15", + "busboy": "1.6.0", + "caniuse-lite": "^1.0.30001579", + "postcss": "8.4.31", + "styled-jsx": "5.1.6" + }, + "bin": { + "next": "dist/bin/next" + }, + "engines": { + "node": "^18.18.0 || ^19.8.0 || >= 20.0.0" + }, + "optionalDependencies": { + "@next/swc-darwin-arm64": "15.2.4", + "@next/swc-darwin-x64": "15.2.4", + "@next/swc-linux-arm64-gnu": "15.2.4", + "@next/swc-linux-arm64-musl": "15.2.4", + "@next/swc-linux-x64-gnu": "15.2.4", + "@next/swc-linux-x64-musl": "15.2.4", + "@next/swc-win32-arm64-msvc": "15.2.4", + "@next/swc-win32-x64-msvc": "15.2.4", + "sharp": "^0.33.5" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.1.0", + "@playwright/test": "^1.41.2", + "babel-plugin-react-compiler": "*", + "react": "^18.2.0 || 19.0.0-rc-de68d2f4-20241204 || ^19.0.0", + "react-dom": "^18.2.0 || 19.0.0-rc-de68d2f4-20241204 || ^19.0.0", + "sass": "^1.3.0" + }, + "peerDependenciesMeta": { + "@opentelemetry/api": { + "optional": true + }, + "@playwright/test": { + "optional": true + }, + "babel-plugin-react-compiler": { + "optional": true + }, + "sass": { + "optional": true + } + } + }, + "node_modules/next/node_modules/postcss": { + "version": "8.4.31", + "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.31.tgz", + "integrity": "sha512-PS08Iboia9mts/2ygV3eLpY5ghnUcfLV/EXTOW1E2qYxJKGGBUtNjN76FYHnMs36RmARn41bC0AZmn+rR0OVpQ==", + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/postcss" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "dependencies": { + "nanoid": "^3.3.6", + "picocolors": "^1.0.0", + "source-map-js": "^1.0.2" + }, + "engines": { + "node": "^10 || ^12 || >=14" + } + }, + "node_modules/object-assign": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz", + 
"integrity": "sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==", + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/picocolors": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz", + "integrity": "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==", + "license": "ISC" + }, + "node_modules/postcss": { + "version": "8.5.3", + "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.5.3.tgz", + "integrity": "sha512-dle9A3yYxlBSrt8Fu+IpjGT8SY8hN0mlaA6GY8t0P5PjIOZemULz/E2Bnm/2dcUOena75OTNkHI76uZBNUUq3A==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/postcss" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "dependencies": { + "nanoid": "^3.3.8", + "picocolors": "^1.1.1", + "source-map-js": "^1.2.1" + }, + "engines": { + "node": "^10 || ^12 || >=14" + } + }, + "node_modules/prop-types": { + "version": "15.8.1", + "resolved": "https://registry.npmjs.org/prop-types/-/prop-types-15.8.1.tgz", + "integrity": "sha512-oj87CgZICdulUohogVAR7AjlC0327U4el4L6eAvOqCeudMDVU0NThNaV+b9Df4dXgSP1gXMTnPdhfe/2qDH5cg==", + "license": "MIT", + "dependencies": { + "loose-envify": "^1.4.0", + "object-assign": "^4.1.1", + "react-is": "^16.13.1" + } + }, + "node_modules/prop-types/node_modules/react-is": { + "version": "16.13.1", + "resolved": "https://registry.npmjs.org/react-is/-/react-is-16.13.1.tgz", + "integrity": "sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ==", + "license": "MIT" + }, + "node_modules/proxy-from-env": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/proxy-from-env/-/proxy-from-env-1.1.0.tgz", + "integrity": 
"sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg==", + "license": "MIT" + }, + "node_modules/react": { + "version": "19.1.0", + "resolved": "https://registry.npmjs.org/react/-/react-19.1.0.tgz", + "integrity": "sha512-FS+XFBNvn3GTAWq26joslQgWNoFu08F4kl0J4CgdNKADkdSGXQyTCnKteIAJy96Br6YbpEU1LSzV5dYtjMkMDg==", + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/react-chartjs-2": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/react-chartjs-2/-/react-chartjs-2-5.3.0.tgz", + "integrity": "sha512-UfZZFnDsERI3c3CZGxzvNJd02SHjaSJ8kgW1djn65H1KK8rehwTjyrRKOG3VTMG8wtHZ5rgAO5oTHtHi9GCCmw==", + "license": "MIT", + "peerDependencies": { + "chart.js": "^4.1.1", + "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0" + } + }, + "node_modules/react-cookie": { + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/react-cookie/-/react-cookie-8.0.1.tgz", + "integrity": "sha512-QNdAd0MLuAiDiLcDU/2s/eyKmmfMHtjPUKJ2dZ/5CcQ9QKUium4B3o61/haq6PQl/YWFqC5PO8GvxeHKhy3GFA==", + "license": "MIT", + "dependencies": { + "@types/hoist-non-react-statics": "^3.3.6", + "hoist-non-react-statics": "^3.3.2", + "universal-cookie": "^8.0.0" + }, + "peerDependencies": { + "react": ">= 16.3.0" + } + }, + "node_modules/react-dom": { + "version": "19.1.0", + "resolved": "https://registry.npmjs.org/react-dom/-/react-dom-19.1.0.tgz", + "integrity": "sha512-Xs1hdnE+DyKgeHJeJznQmYMIBG3TKIHJJT95Q58nHLSrElKlGQqDTR2HQ9fx5CN/Gk6Vh/kupBTDLU11/nDk/g==", + "license": "MIT", + "dependencies": { + "scheduler": "^0.26.0" + }, + "peerDependencies": { + "react": "^19.1.0" + } + }, + "node_modules/react-draggable": { + "version": "4.4.6", + "resolved": "https://registry.npmjs.org/react-draggable/-/react-draggable-4.4.6.tgz", + "integrity": "sha512-LtY5Xw1zTPqHkVmtM3X8MUOxNDOUhv/khTgBgrUvwaS064bwVvxT+q5El0uUFNx5IEPKXuRejr7UqLwBIg5pdw==", + "license": "MIT", + "dependencies": { + "clsx": "^1.1.1", + "prop-types": "^15.8.1" + }, + 
"peerDependencies": { + "react": ">= 16.3.0", + "react-dom": ">= 16.3.0" + } + }, + "node_modules/react-draggable/node_modules/clsx": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/clsx/-/clsx-1.2.1.tgz", + "integrity": "sha512-EcR6r5a8bj6pu3ycsa/E/cKVGuTgZJZdsyUYHOksG/UHIiKfjxzRxYJpyVBwYaQeOvghal9fcc4PidlgzugAQg==", + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/react-grid-layout": { + "version": "1.5.1", + "resolved": "https://registry.npmjs.org/react-grid-layout/-/react-grid-layout-1.5.1.tgz", + "integrity": "sha512-4Fr+kKMk0+m1HL/BWfHxi/lRuaOmDNNKQDcu7m12+NEYcen20wIuZFo789u3qWCyvUsNUxCiyf0eKq4WiJSNYw==", + "license": "MIT", + "dependencies": { + "clsx": "^2.0.0", + "fast-equals": "^4.0.3", + "prop-types": "^15.8.1", + "react-draggable": "^4.4.5", + "react-resizable": "^3.0.5", + "resize-observer-polyfill": "^1.5.1" + }, + "peerDependencies": { + "react": ">= 16.3.0", + "react-dom": ">= 16.3.0" + } + }, + "node_modules/react-grid-layout/node_modules/fast-equals": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/fast-equals/-/fast-equals-4.0.3.tgz", + "integrity": "sha512-G3BSX9cfKttjr+2o1O22tYMLq0DPluZnYtq1rXumE1SpL/F/SLIfHx08WYQoWSIpeMYf8sRbJ8++71+v6Pnxfg==", + "license": "MIT" + }, + "node_modules/react-is": { + "version": "18.3.1", + "resolved": "https://registry.npmjs.org/react-is/-/react-is-18.3.1.tgz", + "integrity": "sha512-/LLMVyas0ljjAtoYiPqYiL8VWXzUUdThrmU5+n20DZv+a+ClRoevUzw5JxU+Ieh5/c87ytoTBV9G1FiKfNJdmg==", + "license": "MIT" + }, + "node_modules/react-resizable": { + "version": "3.0.5", + "resolved": "https://registry.npmjs.org/react-resizable/-/react-resizable-3.0.5.tgz", + "integrity": "sha512-vKpeHhI5OZvYn82kXOs1bC8aOXktGU5AmKAgaZS4F5JPburCtbmDPqE7Pzp+1kN4+Wb81LlF33VpGwWwtXem+w==", + "license": "MIT", + "dependencies": { + "prop-types": "15.x", + "react-draggable": "^4.0.3" + }, + "peerDependencies": { + "react": ">= 16.3" + } + }, + "node_modules/react-smooth": { + "version": 
"4.0.4", + "resolved": "https://registry.npmjs.org/react-smooth/-/react-smooth-4.0.4.tgz", + "integrity": "sha512-gnGKTpYwqL0Iii09gHobNolvX4Kiq4PKx6eWBCYYix+8cdw+cGo3do906l1NBPKkSWx1DghC1dlWG9L2uGd61Q==", + "license": "MIT", + "dependencies": { + "fast-equals": "^5.0.1", + "prop-types": "^15.8.1", + "react-transition-group": "^4.4.5" + }, + "peerDependencies": { + "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0", + "react-dom": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0" + } + }, + "node_modules/react-tooltip": { + "version": "5.28.1", + "resolved": "https://registry.npmjs.org/react-tooltip/-/react-tooltip-5.28.1.tgz", + "integrity": "sha512-ZA4oHwoIIK09TS7PvSLFcRlje1wGZaxw6xHvfrzn6T82UcMEfEmHVCad16Gnr4NDNDh93HyN037VK4HDi5odfQ==", + "license": "MIT", + "dependencies": { + "@floating-ui/dom": "^1.6.1", + "classnames": "^2.3.0" + }, + "peerDependencies": { + "react": ">=16.14.0", + "react-dom": ">=16.14.0" + } + }, + "node_modules/react-transition-group": { + "version": "4.4.5", + "resolved": "https://registry.npmjs.org/react-transition-group/-/react-transition-group-4.4.5.tgz", + "integrity": "sha512-pZcd1MCJoiKiBR2NRxeCRg13uCXbydPnmB4EOeRrY7480qNWO8IIgQG6zlDkm6uRMsURXPuKq0GWtiM59a5Q6g==", + "license": "BSD-3-Clause", + "dependencies": { + "@babel/runtime": "^7.5.5", + "dom-helpers": "^5.0.1", + "loose-envify": "^1.4.0", + "prop-types": "^15.6.2" + }, + "peerDependencies": { + "react": ">=16.6.0", + "react-dom": ">=16.6.0" + } + }, + "node_modules/recharts": { + "version": "2.15.1", + "resolved": "https://registry.npmjs.org/recharts/-/recharts-2.15.1.tgz", + "integrity": "sha512-v8PUTUlyiDe56qUj82w/EDVuzEFXwEHp9/xOowGAZwfLjB9uAy3GllQVIYMWF6nU+qibx85WF75zD7AjqoT54Q==", + "license": "MIT", + "dependencies": { + "clsx": "^2.0.0", + "eventemitter3": "^4.0.1", + "lodash": "^4.17.21", + "react-is": "^18.3.1", + "react-smooth": "^4.0.4", + "recharts-scale": "^0.4.4", + "tiny-invariant": "^1.3.1", + "victory-vendor": "^36.6.8" + }, + "engines": { + "node": ">=14" + }, + 
"peerDependencies": { + "react": "^16.0.0 || ^17.0.0 || ^18.0.0 || ^19.0.0", + "react-dom": "^16.0.0 || ^17.0.0 || ^18.0.0 || ^19.0.0" + } + }, + "node_modules/recharts-scale": { + "version": "0.4.5", + "resolved": "https://registry.npmjs.org/recharts-scale/-/recharts-scale-0.4.5.tgz", + "integrity": "sha512-kivNFO+0OcUNu7jQquLXAxz1FIwZj8nrj+YkOKc5694NbjCvcT6aSZiIzNzd2Kul4o4rTto8QVR9lMNtxD4G1w==", + "license": "MIT", + "dependencies": { + "decimal.js-light": "^2.4.1" + } + }, + "node_modules/regenerator-runtime": { + "version": "0.14.1", + "resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.14.1.tgz", + "integrity": "sha512-dYnhHh0nJoMfnkZs6GmmhFknAGRrLznOu5nc9ML+EJxGvrx6H7teuevqVqCuPcPK//3eDrrjQhehXVx9cnkGdw==", + "license": "MIT" + }, + "node_modules/resize-observer-polyfill": { + "version": "1.5.1", + "resolved": "https://registry.npmjs.org/resize-observer-polyfill/-/resize-observer-polyfill-1.5.1.tgz", + "integrity": "sha512-LwZrotdHOo12nQuZlHEmtuXdqGoOD0OhaxopaNFxWzInpEgaLWoVuAMbTzixuosCx2nEG58ngzW3vxdWoxIgdg==", + "license": "MIT" + }, + "node_modules/scheduler": { + "version": "0.26.0", + "resolved": "https://registry.npmjs.org/scheduler/-/scheduler-0.26.0.tgz", + "integrity": "sha512-NlHwttCI/l5gCPR3D1nNXtWABUmBwvZpEQiD4IXSbIDq8BzLIK/7Ir5gTFSGZDUu37K5cMNp0hFtzO38sC7gWA==", + "license": "MIT" + }, + "node_modules/semver": { + "version": "7.7.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.1.tgz", + "integrity": "sha512-hlq8tAfn0m/61p4BVRcPzIGr6LKiMwo4VM6dGi6pt4qcRkmNzTcWq6eCEjEh+qXjkMDvPlOFFSGwQjoEa6gyMA==", + "license": "ISC", + "optional": true, + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/sharp": { + "version": "0.33.5", + "resolved": "https://registry.npmjs.org/sharp/-/sharp-0.33.5.tgz", + "integrity": "sha512-haPVm1EkS9pgvHrQ/F3Xy+hgcuMV0Wm9vfIBSiwZ05k+xgb0PkBQpGsAA/oWdDobNaZTH5ppvHtzCFbnSEwHVw==", + "hasInstallScript": true, + "license": 
"Apache-2.0", + "optional": true, + "dependencies": { + "color": "^4.2.3", + "detect-libc": "^2.0.3", + "semver": "^7.6.3" + }, + "engines": { + "node": "^18.17.0 || ^20.3.0 || >=21.0.0" + }, + "funding": { + "url": "https://opencollective.com/libvips" + }, + "optionalDependencies": { + "@img/sharp-darwin-arm64": "0.33.5", + "@img/sharp-darwin-x64": "0.33.5", + "@img/sharp-libvips-darwin-arm64": "1.0.4", + "@img/sharp-libvips-darwin-x64": "1.0.4", + "@img/sharp-libvips-linux-arm": "1.0.5", + "@img/sharp-libvips-linux-arm64": "1.0.4", + "@img/sharp-libvips-linux-s390x": "1.0.4", + "@img/sharp-libvips-linux-x64": "1.0.4", + "@img/sharp-libvips-linuxmusl-arm64": "1.0.4", + "@img/sharp-libvips-linuxmusl-x64": "1.0.4", + "@img/sharp-linux-arm": "0.33.5", + "@img/sharp-linux-arm64": "0.33.5", + "@img/sharp-linux-s390x": "0.33.5", + "@img/sharp-linux-x64": "0.33.5", + "@img/sharp-linuxmusl-arm64": "0.33.5", + "@img/sharp-linuxmusl-x64": "0.33.5", + "@img/sharp-wasm32": "0.33.5", + "@img/sharp-win32-ia32": "0.33.5", + "@img/sharp-win32-x64": "0.33.5" + } + }, + "node_modules/simple-swizzle": { + "version": "0.2.2", + "resolved": "https://registry.npmjs.org/simple-swizzle/-/simple-swizzle-0.2.2.tgz", + "integrity": "sha512-JA//kQgZtbuY83m+xT+tXJkmJncGMTFT+C+g2h2R9uxkYIrE2yy9sgmcLhCnw57/WSD+Eh3J97FPEDFnbXnDUg==", + "license": "MIT", + "optional": true, + "dependencies": { + "is-arrayish": "^0.3.1" + } + }, + "node_modules/simplex-noise": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/simplex-noise/-/simplex-noise-4.0.3.tgz", + "integrity": "sha512-qSE2I4AngLQG7BXqoZj51jokT4WUXe8mOBrvfOXpci8+6Yu44+/dD5zqDpOx3Ux792eamTd2lLcI8jqFntk/lg==", + "license": "MIT" + }, + "node_modules/source-map-js": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.2.1.tgz", + "integrity": "sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA==", + "license": "BSD-3-Clause", + "engines": { + 
"node": ">=0.10.0" + } + }, + "node_modules/streamsearch": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/streamsearch/-/streamsearch-1.1.0.tgz", + "integrity": "sha512-Mcc5wHehp9aXz1ax6bZUyY5afg9u2rv5cqQI3mRrYkGC8rW2hM02jWuwjtL++LS5qinSyhj2QfLyNsuc+VsExg==", + "engines": { + "node": ">=10.0.0" + } + }, + "node_modules/styled-jsx": { + "version": "5.1.6", + "resolved": "https://registry.npmjs.org/styled-jsx/-/styled-jsx-5.1.6.tgz", + "integrity": "sha512-qSVyDTeMotdvQYoHWLNGwRFJHC+i+ZvdBRYosOFgC+Wg1vx4frN2/RG/NA7SYqqvKNLf39P2LSRA2pu6n0XYZA==", + "license": "MIT", + "dependencies": { + "client-only": "0.0.1" + }, + "engines": { + "node": ">= 12.0.0" + }, + "peerDependencies": { + "react": ">= 16.8.0 || 17.x.x || ^18.0.0-0 || ^19.0.0-0" + }, + "peerDependenciesMeta": { + "@babel/core": { + "optional": true + }, + "babel-plugin-macros": { + "optional": true + } + } + }, + "node_modules/tailwind-merge": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/tailwind-merge/-/tailwind-merge-3.1.0.tgz", + "integrity": "sha512-aV27Oj8B7U/tAOMhJsSGdWqelfmudnGMdXIlMnk1JfsjwSjts6o8HyfN7SFH3EztzH4YH8kk6GbLTHzITJO39Q==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/dcastil" + } + }, + "node_modules/tailwindcss": { + "version": "4.0.17", + "resolved": "https://registry.npmjs.org/tailwindcss/-/tailwindcss-4.0.17.tgz", + "integrity": "sha512-OErSiGzRa6rLiOvaipsDZvLMSpsBZ4ysB4f0VKGXUrjw2jfkJRd6kjRKV2+ZmTCNvwtvgdDam5D7w6WXsdLJZw==", + "dev": true, + "license": "MIT" + }, + "node_modules/tapable": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/tapable/-/tapable-2.2.1.tgz", + "integrity": "sha512-GNzQvQTOIP6RyTfE2Qxb8ZVlNmw0n88vp1szwWRimP02mnTsx3Wtn5qRdqY9w2XduFNUgvOwhNnQsjwCp+kqaQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/tiny-invariant": { + "version": "1.3.3", + "resolved": 
"https://registry.npmjs.org/tiny-invariant/-/tiny-invariant-1.3.3.tgz", + "integrity": "sha512-+FbBPE1o9QAYvviau/qC5SE3caw21q3xkvWKBtja5vgqOWIHHJ3ioaq1VPfn/Szqctz2bU/oYeKd9/z5BL+PVg==", + "license": "MIT" + }, + "node_modules/tslib": { + "version": "2.8.1", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.1.tgz", + "integrity": "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==", + "license": "0BSD" + }, + "node_modules/tw-animate-css": { + "version": "1.2.5", + "resolved": "https://registry.npmjs.org/tw-animate-css/-/tw-animate-css-1.2.5.tgz", + "integrity": "sha512-ABzjfgVo+fDbhRREGL4KQZUqqdPgvc5zVrLyeW9/6mVqvaDepXc7EvedA+pYmMnIOsUAQMwcWzNvom26J2qYvQ==", + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/Wombosvideo" + } + }, + "node_modules/typescript": { + "version": "5.8.2", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.8.2.tgz", + "integrity": "sha512-aJn6wq13/afZp/jT9QZmwEjDqqvSGp1VT5GVg+f/t6/oVyrgXM6BY1h9BRh/O5p3PlUPAe+WuiEZOmb/49RqoQ==", + "dev": true, + "license": "Apache-2.0", + "bin": { + "tsc": "bin/tsc", + "tsserver": "bin/tsserver" + }, + "engines": { + "node": ">=14.17" + } + }, + "node_modules/undici-types": { + "version": "6.19.8", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.19.8.tgz", + "integrity": "sha512-ve2KP6f/JnbPBFyobGHuerC9g1FYGn/F8n1LWTwNxCEzd6IfqTwUQcNXgEtmmQ6DlRrC1hrSrBnCZPokRrDHjw==", + "dev": true, + "license": "MIT" + }, + "node_modules/universal-cookie": { + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/universal-cookie/-/universal-cookie-8.0.1.tgz", + "integrity": "sha512-B6ks9FLLnP1UbPPcveOidfvB9pHjP+wekP2uRYB9YDfKVpvcjKgy1W5Zj+cEXJ9KTPnqOKGfVDQBmn8/YCQfRg==", + "license": "MIT", + "dependencies": { + "cookie": "^1.0.2" + } + }, + "node_modules/victory-vendor": { + "version": "36.9.2", + "resolved": "https://registry.npmjs.org/victory-vendor/-/victory-vendor-36.9.2.tgz", + "integrity": 
"sha512-PnpQQMuxlwYdocC8fIJqVXvkeViHYzotI+NJrCuav0ZYFoq912ZHBk3mCeuj+5/VpodOjPe1z0Fk2ihgzlXqjQ==", + "license": "MIT AND ISC", + "dependencies": { + "@types/d3-array": "^3.0.3", + "@types/d3-ease": "^3.0.0", + "@types/d3-interpolate": "^3.0.1", + "@types/d3-scale": "^4.0.2", + "@types/d3-shape": "^3.1.0", + "@types/d3-time": "^3.0.0", + "@types/d3-timer": "^3.0.0", + "d3-array": "^3.1.6", + "d3-ease": "^3.0.1", + "d3-interpolate": "^3.0.1", + "d3-scale": "^4.0.2", + "d3-shape": "^3.1.0", + "d3-time": "^3.0.0", + "d3-timer": "^3.0.1" + } + } + } +} diff --git a/poc-frontend/package.json b/poc-frontend/package.json new file mode 100644 index 0000000..89c8c97 --- /dev/null +++ b/poc-frontend/package.json @@ -0,0 +1,40 @@ +{ + "name": "poc-fe", + "version": "0.1.0", + "private": true, + "scripts": { + "dev": "next dev", + "build": "next build", + "start": "next start", + "lint": "next lint" + }, + "dependencies": { + "axios": "^1.8.4", + "chart.js": "^4.4.8", + "class-variance-authority": "^0.7.1", + "clsx": "^2.1.1", + "lucide-react": "^0.486.0", + "motion": "^12.6.3", + "next": "15.2.4", + "react": "^19.0.0", + "react-chartjs-2": "^5.3.0", + "react-cookie": "^8.0.1", + "react-dom": "^19.0.0", + "react-grid-layout": "^1.5.1", + "react-tooltip": "^5.28.1", + "recharts": "^2.15.1", + "simplex-noise": "^4.0.3", + "tailwind-merge": "^3.1.0", + "tw-animate-css": "^1.2.5", + "universal-cookie": "^8.0.1" + }, + "devDependencies": { + "@tailwindcss/postcss": "^4", + "@types/node": "^20", + "@types/react": "^19", + "@types/react-dom": "^19", + "@types/react-grid-layout": "^1.3.5", + "tailwindcss": "^4", + "typescript": "^5" + } +} diff --git a/poc-frontend/postcss.config.mjs b/poc-frontend/postcss.config.mjs new file mode 100644 index 0000000..c7bcb4b --- /dev/null +++ b/poc-frontend/postcss.config.mjs @@ -0,0 +1,5 @@ +const config = { + plugins: ["@tailwindcss/postcss"], +}; + +export default config; diff --git a/poc-frontend/public/file.svg b/poc-frontend/public/file.svg new 
file mode 100644 index 0000000..004145c --- /dev/null +++ b/poc-frontend/public/file.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/poc-frontend/public/globe.svg b/poc-frontend/public/globe.svg new file mode 100644 index 0000000..567f17b --- /dev/null +++ b/poc-frontend/public/globe.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/poc-frontend/public/next.svg b/poc-frontend/public/next.svg new file mode 100644 index 0000000..5174b28 --- /dev/null +++ b/poc-frontend/public/next.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/poc-frontend/public/vercel.svg b/poc-frontend/public/vercel.svg new file mode 100644 index 0000000..7705396 --- /dev/null +++ b/poc-frontend/public/vercel.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/poc-frontend/public/window.svg b/poc-frontend/public/window.svg new file mode 100644 index 0000000..b2b2a44 --- /dev/null +++ b/poc-frontend/public/window.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/poc-frontend/src/app/dashboard/layout.tsx b/poc-frontend/src/app/dashboard/layout.tsx new file mode 100644 index 0000000..8a212a4 --- /dev/null +++ b/poc-frontend/src/app/dashboard/layout.tsx @@ -0,0 +1,9 @@ +import React, { ReactNode } from 'react'; + +interface DashboardLayoutProps { + children: ReactNode; +} + +export default function DashboardLayout({ children }: DashboardLayoutProps) { + return children; +} diff --git a/poc-frontend/src/app/dashboard/page.tsx b/poc-frontend/src/app/dashboard/page.tsx new file mode 100644 index 0000000..3f9c4e2 --- /dev/null +++ b/poc-frontend/src/app/dashboard/page.tsx @@ -0,0 +1,342 @@ +'use client'; +import React, { useState, useEffect } from "react"; +import PageSkeleton from "@/components/PageSkeleton"; +import withAuth from "@/components/withAuth"; +import { getUserProfile, getGitHubContributions, getCoverageTrends } from "@/services/api"; +import { AlertCircle, Loader2, ChevronDown, Calendar, Info, Clipboard, BarChart2, LineChart, Code2, 
PieChart } from 'lucide-react'; +import ActivityGraph from "@/components/ActivityGraph"; +import { CoverageHistoryChart } from "@/components/CoverageVisualizations"; + + +const DashboardPage = () => { + const [user, setUser] = useState(null); + const [loading, setLoading] = useState(true); + const [contributionsLoading, setContributionsLoading] = useState(false); + const [coverageLoading, setCoverageLoading] = useState(false); + const [error, setError] = useState(null); + const [metrics, setMetrics] = useState({ + repositories: 0, + totalTests: 0, + passRate: 0, + testsLast7Days: 0 + }); + const [activityData, setActivityData] = useState({ + dailyActivities: [], + totalCount: 0, + maxCount: 0, + repoBreakdown: [], + recentActivity: [] + }); + const [githubContributions, setGithubContributions] = useState({ + total: { lastYear: 0 }, + contributions: [] + }); + const [coverageTrends, setCoverageTrends] = useState([]); + const [selectedYear, setSelectedYear] = useState('last'); + const [yearDropdownOpen, setYearDropdownOpen] = useState(false); + const [selectedRepo, setSelectedRepo] = useState(''); + + // Generate year options: last 5 years plus "last year" option + const currentYear = new Date().getFullYear(); + const yearOptions = [ + { value: 'last', label: 'Last Year' }, + { value: currentYear.toString(), label: currentYear.toString() }, + { value: (currentYear - 1).toString(), label: (currentYear - 1).toString() }, + { value: (currentYear - 2).toString(), label: (currentYear - 2).toString() }, + { value: (currentYear - 3).toString(), label: (currentYear - 3).toString() }, + ]; + + useEffect(() => { + const fetchData = async () => { + try { + setLoading(true); + const userResponse = await getUserProfile(); + setUser(userResponse.data.user || {}); + // Store username in sessionStorage for later use + if (userResponse.data.user?.username) { + sessionStorage.setItem('github_username', userResponse.data.user.username); + } + // Fetch GitHub contributions data for 
initial year + await fetchGithubContributions(userResponse.data.user?.username, selectedYear); + + setMetrics({ + repositories: 12, + totalTests: 256, + passRate: 87, + testsLast7Days: 45 + }); + + setError(null); + } catch (err: any) { + console.error('Error fetching dashboard data:', err); + setError(err.response?.data?.error || 'Failed to fetch dashboard data'); + } finally { + setLoading(false); + } + }; + + fetchData(); + }, []); + + const fetchGithubContributions = async (username: string | undefined, year: string) => { + try { + setContributionsLoading(true); + if (!username) { + username = sessionStorage.getItem('github_username') || ''; + console.log('Using username from sessionStorage:', username); + } + if (!username) { + setGithubContributions({ total: { lastYear: 0 }, contributions: [] }); + return; + } + const contributionsResponse = await getGitHubContributions(username, year); + if (contributionsResponse.data) { + setGithubContributions({ + total: contributionsResponse.data.total || { lastYear: 0 }, + contributions: contributionsResponse.data.contributions || [] + }); + } + } catch (contributionsErr) { + console.error('Error fetching GitHub contributions:', contributionsErr); + } finally { + setContributionsLoading(false); + } + }; + + useEffect(() => { + const fetchCoverageData = async () => { + if (user?.repositories?.length > 0 && selectedRepo) { + try { + setCoverageLoading(true); + const coverageResponse = await getCoverageTrends(selectedRepo, 30); + setCoverageTrends(coverageResponse.data); + } catch (err) { + console.error("Failed to fetch coverage data:", err); + } finally { + setCoverageLoading(false); + } + } + }; + + fetchCoverageData(); + }, [user, selectedRepo]); + + useEffect(() => { + if (user?.repositories?.length > 0 && !selectedRepo) { + setSelectedRepo(user.repositories[0].html_url); + } + }, [user, selectedRepo]); + + const handleYearChange = (year: string) => { + setSelectedYear(year); + const username = 
sessionStorage.getItem('github_username') || user?.username; + fetchGithubContributions(username, year); + setYearDropdownOpen(false); + }; + + // Get the activities to display + const displayActivities = githubContributions.contributions && + githubContributions.contributions.length > 0 ? + githubContributions.contributions : activityData.dailyActivities || []; + + return ( + +
+ {error && ( +
+
+ +
+ {error} +
+ )} + + {loading ? ( +
+ + Loading dashboard metrics... +
+ ) : ( + <> + {/* Welcome message */} +
+

Welcome back, {user?.name || 'User'}!

+

Here's a summary of your GitHub API activity

+
+ + {/* Metrics cards */} +
+
+
+

Repositories

+ +
+

{metrics.repositories}

+

Connected repos

+
+ +
+
+

Total Tests

+ +
+

{metrics.totalTests}

+

API tests run

+
+ +
+
+

Pass Rate

+ +
+

{metrics.passRate}%

+

Success rate

+
+ +
+
+

Recent Tests

+ +
+

{metrics.testsLast7Days}

+

Last 7 days

+
+ + {/* Coverage Card */} +
+
+

Average Coverage

+ +
+ {coverageLoading ? ( +
+ +
+ ) : coverageTrends && coverageTrends.length > 0 ? ( + <> +
+ {(coverageTrends.reduce((sum: number, item: any) => sum + item.coverage, 0) / coverageTrends.length).toFixed(1)}% +
+

Last 30 days

+
+ +
+ + ) : ( +
No coverage data available
+ )} +
+
+ + {/* Activity Graph with Year Toggle */} +
+
+
+

+ + GitHub Contributions +

+ + {/* Year Selector Dropdown */} +
+ + + {yearDropdownOpen && ( +
+ {yearOptions.map((year) => ( + + ))} +
+ )} +
+
+ + {/* Loading state for contributions */} + {contributionsLoading ? ( +
+ + Loading contributions... +
+ ) : ( + <> +
+ {githubContributions.total?.lastYear || 0} contributions in the selected period +
+ + + )} +
+
+ + {/* Coverage History */} +
+
+
+

+ + Coverage History +

+
+ {selectedRepo && ( + + View Details → + + )} +
+
+ + {coverageLoading ? ( +
+ +
+ ) : coverageTrends && coverageTrends.length > 0 ? ( +
+ +
+ ) : ( +
+ No coverage history available +
+ )} +
+
+ + )} +
+ + +
+ ); +}; + +export default withAuth(DashboardPage); diff --git a/poc-frontend/src/app/favicon.ico b/poc-frontend/src/app/favicon.ico new file mode 100644 index 0000000..134ec31 Binary files /dev/null and b/poc-frontend/src/app/favicon.ico differ diff --git a/poc-frontend/src/app/globals.css b/poc-frontend/src/app/globals.css new file mode 100644 index 0000000..b34c712 --- /dev/null +++ b/poc-frontend/src/app/globals.css @@ -0,0 +1,214 @@ +@import "tailwindcss"; +@import "tw-animate-css"; + +@custom-variant dark (&:is(.dark *)); + +@theme inline { + --color-background: var(--background); + --color-foreground: var(--foreground); + --font-sans: var(--font-geist-sans); + --font-mono: var(--font-geist-mono); + --color-sidebar-ring: var(--sidebar-ring); + --color-sidebar-border: var(--sidebar-border); + --color-sidebar-accent-foreground: var(--sidebar-accent-foreground); + --color-sidebar-accent: var(--sidebar-accent); + --color-sidebar-primary-foreground: var(--sidebar-primary-foreground); + --color-sidebar-primary: var(--sidebar-primary); + --color-sidebar-foreground: var(--sidebar-foreground); + --color-sidebar: var(--sidebar); + --color-chart-5: var(--chart-5); + --color-chart-4: var(--chart-4); + --color-chart-3: var(--chart-3); + --color-chart-2: var(--chart-2); + --color-chart-1: var(--chart-1); + --color-ring: var(--ring); + --color-input: var(--input); + --color-border: var(--border); + --color-destructive: var(--destructive); + --color-accent-foreground: var(--accent-foreground); + --color-accent: var(--accent); + --color-muted-foreground: var(--muted-foreground); + --color-muted: var(--muted); + --color-secondary-foreground: var(--secondary-foreground); + --color-secondary: var(--secondary); + --color-primary-foreground: var(--primary-foreground); + --color-primary: var(--primary); + --color-popover-foreground: var(--popover-foreground); + --color-popover: var(--popover); + --color-card-foreground: var(--card-foreground); + --color-card: var(--card); + 
--radius-sm: calc(var(--radius) - 4px); + --radius-md: calc(var(--radius) - 2px); + --radius-lg: var(--radius); + --radius-xl: calc(var(--radius) + 4px); +} + +:root { + --radius: 0.625rem; + --background: oklch(1 0 0); + --foreground: oklch(0.145 0 0); + --card: oklch(1 0 0); + --card-foreground: oklch(0.145 0 0); + --popover: oklch(1 0 0); + --popover-foreground: oklch(0.145 0 0); + --primary: oklch(0.205 0 0); + --primary-foreground: oklch(0.985 0 0); + --secondary: oklch(0.97 0 0); + --secondary-foreground: oklch(0.205 0 0); + --muted: oklch(0.97 0 0); + --muted-foreground: oklch(0.556 0 0); + --accent: oklch(0.97 0 0); + --accent-foreground: oklch(0.205 0 0); + --destructive: oklch(0.577 0.245 27.325); + --border: oklch(0.922 0 0); + --input: oklch(0.922 0 0); + --ring: oklch(0.708 0 0); + --chart-1: oklch(0.646 0.222 41.116); + --chart-2: oklch(0.6 0.118 184.704); + --chart-3: oklch(0.398 0.07 227.392); + --chart-4: oklch(0.828 0.189 84.429); + --chart-5: oklch(0.769 0.188 70.08); + --sidebar: oklch(0.985 0 0); + --sidebar-foreground: oklch(0.145 0 0); + --sidebar-primary: oklch(0.205 0 0); + --sidebar-primary-foreground: oklch(0.985 0 0); + --sidebar-accent: oklch(0.97 0 0); + --sidebar-accent-foreground: oklch(0.205 0 0); + --sidebar-border: oklch(0.922 0 0); + --sidebar-ring: oklch(0.708 0 0); +} + +.dark { + --background: oklch(0.145 0 0); + --foreground: oklch(0.985 0 0); + --card: oklch(0.205 0 0); + --card-foreground: oklch(0.985 0 0); + --popover: oklch(0.205 0 0); + --popover-foreground: oklch(0.985 0 0); + --primary: oklch(0.922 0 0); + --primary-foreground: oklch(0.205 0 0); + --secondary: oklch(0.269 0 0); + --secondary-foreground: oklch(0.985 0 0); + --muted: oklch(0.269 0 0); + --muted-foreground: oklch(0.708 0 0); + --accent: oklch(0.269 0 0); + --accent-foreground: oklch(0.985 0 0); + --destructive: oklch(0.704 0.191 22.216); + --border: oklch(1 0 0 / 10%); + --input: oklch(1 0 0 / 15%); + --ring: oklch(0.556 0 0); + --chart-1: oklch(0.488 
0.243 264.376); + --chart-2: oklch(0.696 0.17 162.48); + --chart-3: oklch(0.769 0.188 70.08); + --chart-4: oklch(0.627 0.265 303.9); + --chart-5: oklch(0.645 0.246 16.439); + --sidebar: oklch(0.205 0 0); + --sidebar-foreground: oklch(0.985 0 0); + --sidebar-primary: oklch(0.488 0.243 264.376); + --sidebar-primary-foreground: oklch(0.985 0 0); + --sidebar-accent: oklch(0.269 0 0); + --sidebar-accent-foreground: oklch(0.985 0 0); + --sidebar-border: oklch(1 0 0 / 10%); + --sidebar-ring: oklch(0.556 0 0); +} + +@layer base { + * { + @apply border-border outline-ring/50; + } + body { + @apply bg-background text-foreground; + } + /* Add custom scrollbar styles */ + .hide-scrollbar::-webkit-scrollbar { + display: none; + } + .hide-scrollbar { + -ms-overflow-style: none; /* IE and Edge */ + scrollbar-width: none; /* Firefox */ + } + /* Dashboard widgets */ + .dashboard-widget { + transition: all 0.3s ease; + } + + .dashboard-widget:hover { + box-shadow: 0 0 10px rgba(249, 115, 22, 0.3); + } + + /* Custom scrollbar */ + .hide-scrollbar::-webkit-scrollbar { + width: 6px; + height: 6px; + } + + .hide-scrollbar::-webkit-scrollbar-track { + background: #1e293b; + } + + .hide-scrollbar::-webkit-scrollbar-thumb { + background: #475569; + border-radius: 3px; + } + + .hide-scrollbar::-webkit-scrollbar-thumb:hover { + background: #f97316; + } + + /* React Grid Layout styles overrides */ + .react-grid-item.react-grid-placeholder { + background: rgba(249, 115, 22, 0.2) !important; + border: 1px dashed #f97316 !important; + } + + .react-resizable-handle { + background-image: url("data:image/svg+xml,%3Csvg xmlns='http://www.w3.org/2000/svg' width='10' height='10' viewBox='0 0 10 10'%3E%3Cpath d='M0 0v10h10' fill='none' stroke='%23f97316' stroke-opacity='0.5'/%3E%3C/svg%3E") !important; + background-position: bottom right; + background-repeat: no-repeat; + background-origin: content-box; + box-sizing: border-box; + opacity: 0.5; + transition: all 0.2s ease; + } + + 
.react-resizable-handle:hover { + opacity: 1; + } + + /* Custom animations */ + @keyframes pulse { + 0% { + box-shadow: 0 0 0 0 rgba(249, 115, 22, 0.7); + } + 70% { + box-shadow: 0 0 0 10px rgba(249, 115, 22, 0); + } + 100% { + box-shadow: 0 0 0 0 rgba(249, 115, 22, 0); + } + } + + .pulse-animation { + animation: pulse 2s infinite; + } + + /* Transition effects */ + .fade-in { + animation: fadeIn 0.3s ease-in; + } + + @keyframes fadeIn { + from { opacity: 0; } + to { opacity: 1; } + } + + .slide-in { + animation: slideIn 0.3s ease-in; + } + + @keyframes slideIn { + from { transform: translateY(10px); opacity: 0; } + to { transform: translateY(0); opacity: 1; } + } +} + diff --git a/poc-frontend/src/app/layout.tsx b/poc-frontend/src/app/layout.tsx new file mode 100644 index 0000000..f7fa87e --- /dev/null +++ b/poc-frontend/src/app/layout.tsx @@ -0,0 +1,34 @@ +import type { Metadata } from "next"; +import { Geist, Geist_Mono } from "next/font/google"; +import "./globals.css"; + +const geistSans = Geist({ + variable: "--font-geist-sans", + subsets: ["latin"], +}); + +const geistMono = Geist_Mono({ + variable: "--font-geist-mono", + subsets: ["latin"], +}); + +export const metadata: Metadata = { + title: "Create Next App", + description: "Generated by create next app", +}; + +export default function RootLayout({ + children, +}: Readonly<{ + children: React.ReactNode; +}>) { + return ( + + + {children} + + + ); +} diff --git a/poc-frontend/src/app/page.tsx b/poc-frontend/src/app/page.tsx new file mode 100644 index 0000000..b4a54a9 --- /dev/null +++ b/poc-frontend/src/app/page.tsx @@ -0,0 +1,164 @@ +"use client"; +import React, { useEffect, useState, Suspense } from "react"; +import { WavyBackground } from "@/components/ui/wavy-background"; +import { useRouter, useSearchParams } from "next/navigation"; +import { githubSignIn, githubSignUp, isAuthenticated } from "@/services/auth"; +import { GITHUB_CLIENT_ID, REDIRECT_URI } from "@/constants/routes"; + +function 
AuthButtons({ + loading, + error, + redirectToGitHub, +}: { + loading: boolean; + error: string | null; + redirectToGitHub: (type: 'signup' | 'signin') => void; +}) { + return ( +
+ {error && ( +
+

{error}

+
+ )} +
+ + +
+
+ ); +} + +// Move the logic that uses useSearchParams into a child component +function AuthHandler({ + router, + loading, + setLoading, + setError, + setAuthType, + error, +}: { + router: ReturnType; + loading: boolean; + setLoading: React.Dispatch>; + setError: React.Dispatch>; + setAuthType: React.Dispatch>; + error: string | null; +}) { + const searchParams = useSearchParams(); + + useEffect(() => { + if (isAuthenticated()) { + router.push('/dashboard'); + return; + } + + const code = searchParams.get('code'); + const storedAuthType = localStorage.getItem('authType') as 'signup' | 'signin' | null; + + if (code && storedAuthType) { + handleGitHubCallback(code, storedAuthType); + } + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [router, searchParams]); + + const handleGitHubCallback = async (code: string, authType: 'signup' | 'signin') => { + setLoading(true); + setError(null); + + try { + if (authType === 'signup') { + await githubSignUp(code); + } else { + await githubSignIn(code); + } + + localStorage.removeItem('authType'); + router.push('/dashboard'); + } catch (err: any) { + console.error('Authentication error:', err); + setError(err.response?.data?.error || 'Authentication failed. Please try again.'); + } finally { + setLoading(false); + } + }; + + const redirectToGitHub = (type: 'signup' | 'signin') => { + localStorage.setItem('authType', type); + setAuthType(type); + + const githubAuthUrl = `https://github.com/login/oauth/authorize?client_id=${GITHUB_CLIENT_ID}&redirect_uri=${encodeURIComponent(REDIRECT_URI)}&scope=repo,user:email`; + window.location.href = githubAuthUrl; + }; + + if (loading) { + return ( +
+
+
+ ); + } + + return ( + + ); +} + +export default function HomePage() { + const router = useRouter(); + const [loading, setLoading] = useState(false); + const [error, setError] = useState(null); + const [authType, setAuthType] = useState<'signup' | 'signin' | null>(null); + + return ( + + Keploy Logo +

+ Keploy - Simplify Your API Testing +

+

+ Automate performance and functional testing for your APIs with ease +

+
}> + +
+
+ ); +} diff --git a/poc-frontend/src/app/repositories/coverage-tab.tsx b/poc-frontend/src/app/repositories/coverage-tab.tsx new file mode 100644 index 0000000..5f15b7a --- /dev/null +++ b/poc-frontend/src/app/repositories/coverage-tab.tsx @@ -0,0 +1,751 @@ +'use client'; + +import React, { useState, useEffect, useRef, useCallback } from "react"; +import { Repository } from '@/types/repository'; +import { runCoverageScan, getCoverageJobStatus, getCoverageHistory, getCoverageTrends, getUserRepositories, getCoverageById } from "@/services/api"; +import { AlertCircle, BarChart2, History, RefreshCw, GitBranch, GitMerge, GitCompare, Search, Loader2, CheckCircle2,} from 'lucide-react'; +import { CoverageResponse, CoverageHistory, CoverageTrend } from '@/types/coverage'; +import { + FileHeatmap, + CoverageHistoryChart, + CoverageHistoryList, + BranchComparison, + BranchCoverageList +} from '@/components/CoverageVisualizations'; +import SearchableDropdown from "@/components/SearchableDropdown"; + +interface CoverageTabProps { + repositories: Repository[]; + onRefreshRepositories?: () => Promise; + isRefreshing?: boolean; +} + +const CoverageTab: React.FC = ({ + repositories, + onRefreshRepositories, + isRefreshing = false +}) => { + + const [selectedRepo, setSelectedRepo] = useState(''); + const [scanBranch, setScanBranch] = useState(''); + const [coverageResult, setCoverageResult] = useState(null); + const [loadingCoverage, setLoadingCoverage] = useState(false); + const [coverageError, setCoverageError] = useState(null); + const [success, setSuccess] = useState(null); + const [jobId, setJobId] = useState(null); + const [jobStatus, setJobStatus] = useState<'pending' | 'in_progress' | 'completed' | 'failed' | null>(null); + + const [coverageHistory, setCoverageHistory] = useState([]); + const [coverageTrends, setCoverageTrends] = useState([]); + const [loadingHistory, setLoadingHistory] = useState(false); + const [historyError, setHistoryError] = useState(null); + const 
[timeframe, setTimeframe] = useState<'daily' | 'weekly' | 'monthly'>('weekly'); + + const [activeTab, setActiveTab] = useState<'scanner' | 'history' | 'branches' | 'compare'>('scanner'); + const [compareBranch1, setCompareBranch1] = useState('main'); + const [compareBranch2, setCompareBranch2] = useState('develop'); + + const [searchQuery, setSearchQuery] = useState(''); + const [searchResults, setSearchResults] = useState(repositories); + const [loadingRepos, setLoadingRepos] = useState(false); + const [searchError, setSearchError] = useState(null); + const [repoOptions, setRepoOptions] = useState<{ value: string; label: string }[]>([]); + const [skip, setSkip] = useState(0); + const [limit] = useState(50); + + const [scanSettings, setScanSettings] = useState<{ + useAsync: boolean; + cloneTimeout: number; + }>({ + useAsync: false, + cloneTimeout: 300, + }); + const [showAdvanced, setShowAdvanced] = useState(false); + + const prevSearchQueryRef = useRef(''); + const searchTimeoutRef = useRef(null); + + useEffect(() => { + const options = searchResults.map(repo => ({ + value: repo.html_url, + label: repo.name + })); + setRepoOptions(options); + }, [searchResults]); + + useEffect(() => { + if (repositories.length > 0) { + setSearchResults(repositories); + } + }, [repositories]); + + useEffect(() => { + let statusInterval: NodeJS.Timeout | undefined; + + if (jobId && jobStatus && jobStatus !== 'completed' && jobStatus !== 'failed') { + const isPolling = localStorage.getItem(`job_${jobId}_polling`); + + statusInterval = setInterval(async () => { + try { + const response = await getCoverageJobStatus(jobId); + const status = response.data.status; + + if (status === 'completed') { + if (statusInterval) { + clearInterval(statusInterval); + statusInterval = undefined; + } + setJobStatus(status); + setSuccess('Coverage scan completed successfully!'); + setLoadingCoverage(false); + localStorage.removeItem(`job_${jobId}_polling`); + if (response.data.result_id) { + try { + 
const resultResponse = await getCoverageById(response.data.result_id); + setCoverageResult(resultResponse.data); + } catch (resultErr) { + console.error('Failed to fetch coverage result:', resultErr); + } + } + if (selectedRepo) { + fetchCoverageHistory(selectedRepo); + } + setTimeout(() => { + setSuccess(null); + setJobId(null); + setJobStatus(null); + }, 5000); + } else if (status === 'failed') { + if (statusInterval) { + clearInterval(statusInterval); + statusInterval = undefined; + } + setJobStatus(status); + setCoverageError(`Coverage scan failed: ${response.data.error || 'Unknown error'}`); + setLoadingCoverage(false); + localStorage.removeItem(`job_${jobId}_polling`); + setTimeout(() => { + setCoverageError(null); + setJobId(null); + setJobStatus(null); + }, 5000); + } else { + setJobStatus(status); + } + } catch (err) { + console.error('Failed to get job status:', err); + const now = new Date().getTime(); + const jobStartTime = parseInt(localStorage.getItem(`job_${jobId}_start_time`) || '0'); + if (now - jobStartTime > 5 * 60 * 1000) { + console.warn('Job polling timeout reached. Stopping status checks.'); + if (statusInterval) { + clearInterval(statusInterval); + statusInterval = undefined; + } + setJobStatus('failed'); + setCoverageError('Coverage scan timed out. 
Please try again.'); + setLoadingCoverage(false); + localStorage.removeItem(`job_${jobId}_polling`); + setTimeout(() => { + setJobId(null); + setJobStatus(null); + setCoverageError(null); + }, 5000); + } + } + }, 3000); + if (!isPolling) { + localStorage.setItem(`job_${jobId}_start_time`, new Date().getTime().toString()); + } + } + return () => { + if (statusInterval) { + clearInterval(statusInterval); + } + }; + }, [jobId, jobStatus, selectedRepo]); + + useEffect(() => { + return () => { + if (jobId) { + localStorage.removeItem(`job_${jobId}_polling`); + } + }; + }, [jobId]); + + useEffect(() => { + if (selectedRepo) { + const repoUrl = selectedRepo.toLowerCase(); + const isLargeRepo = repoUrl.includes('kubernetes') || + repoUrl.includes('k8s'); + if (isLargeRepo && !scanSettings.useAsync) { + setScanSettings(prev => ({ + ...prev, + useAsync: true, + cloneTimeout: 600, + })); + setShowAdvanced(true); + } + } + }, [selectedRepo]); + + useEffect(() => { + if (selectedRepo) { + fetchCoverageHistory(selectedRepo); + } + }, [timeframe, selectedRepo]); + + const fetchCoverageHistory = async (repoUrl: string) => { + if (!repoUrl) return; + setLoadingHistory(true); + setHistoryError(null); + try { + const response = await getCoverageHistory(repoUrl); + setCoverageHistory(response.data); + const trendsResponse = await getCoverageTrends(repoUrl, + timeframe === 'daily' ? 30 : timeframe === 'weekly' ? 
90 : 365); + setCoverageTrends(trendsResponse.data); + } catch (err: any) { + setHistoryError(err.response?.data?.error || 'Failed to fetch coverage history'); + } finally { + setLoadingHistory(false); + } + }; + + const handleRepoSearch = useCallback((query: string) => { + if (query === prevSearchQueryRef.current) { + return; + } + prevSearchQueryRef.current = query; + setSearchQuery(query); + if (searchTimeoutRef.current) { + clearTimeout(searchTimeoutRef.current); + } + setLoadingRepos(true); + setSearchError(null); + if (!query.trim() && repositories.length > 0) { + setSearchResults(repositories); + setLoadingRepos(false); + return; + } + if (query.trim().length < 3 && repositories.length > 0) { + const filtered = repositories.filter(repo => + repo.name.toLowerCase().includes(query.toLowerCase()) + ); + setSearchResults(filtered); + setLoadingRepos(false); + return; + } + searchTimeoutRef.current = setTimeout(async () => { + try { + const response = await getUserRepositories(0, limit, query); + if (response.data && response.data.repositories) { + setSearchResults(response.data.repositories); + } else { + setSearchResults([]); + } + } catch (err: any) { + console.error('Error searching repositories:', err); + setSearchError(err.message || 'Failed to search repositories'); + } finally { + setLoadingRepos(false); + } + }, 500); + }, [repositories, limit]); + + const handleCoverageScan = async () => { + if (!selectedRepo) return; + setLoadingCoverage(true); + setCoverageError(null); + setSuccess(null); + setJobId(null); + setJobStatus(null); + try { + const response = await runCoverageScan( + selectedRepo, + scanBranch || undefined, + { + async: scanSettings.useAsync, + cloneTimeout: scanSettings.cloneTimeout + } + ); + if (scanSettings.useAsync) { + setJobId(response.data.job_id); + setJobStatus('in_progress'); + localStorage.setItem(`job_${response.data.job_id}_polling`, 'true'); + } else { + setSuccess('Coverage scan completed successfully!'); + 
setLoadingCoverage(false); + setTimeout(() => setSuccess(null), 5000); + } + } catch (err: any) { + console.error('Error scanning coverage:', err); + setCoverageError(err.response?.data?.error || 'Failed to scan coverage. Please try again.'); + setLoadingCoverage(false); + } + }; + + const handleRepoChange = (repoUrl: string) => { + setSelectedRepo(repoUrl); + setCoverageResult(null); + setCoverageError(null); + if (repoUrl) { + fetchCoverageHistory(repoUrl); + } else { + setCoverageHistory([]); + setCoverageTrends([]); + } + }; + + const renderRepositoryDropdown = () => { + return ( +
+ + {searchError && ( +
+ Error: {searchError}. Try using the listed repositories instead. +
+ )} +
+ ); + }; + + return ( +
+ {repositories.length === 0 ? ( +
+ +

No repositories available

+

+ Connect repositories to see your repositories or refresh from GitHub +

+ {onRefreshRepositories && ( + + )} +
+ ) : ( +
+ {onRefreshRepositories && ( +
+ +
+ )} +
+ + + + +
+ {activeTab === 'scanner' && ( +
+
+ {renderRepositoryDropdown()} +
+ + setScanBranch(e.target.value)} + /> +
+ +
+
+ + {showAdvanced && ( +
+

Scan Settings

+
+
+
+ setScanSettings({...scanSettings, useAsync: !scanSettings.useAsync})} + className="sr-only" + /> +
setScanSettings({...scanSettings, useAsync: !scanSettings.useAsync})} + > +
+
+
+ +
+
+
+ setScanSettings({...scanSettings, useAsync: false})} + className="sr-only" + /> +
setScanSettings({...scanSettings, useAsync: false})} + > +
+
+
+ +
+
+ + setScanSettings({ + ...scanSettings, + cloneTimeout: parseInt(e.target.value) || 300 + })} + className="w-full max-w-xs p-2 bg-[#263544] text-white rounded-md border border-gray-700" + /> +
+
+ {selectedRepo && selectedRepo.toLowerCase().includes('kubernetes') && ( +
+
+ + + +
+

Large repository detected

+

+ This appears to be a large repository. We recommend using asynchronous mode for better performance. +

+
+
+
+ )} +
+ )} +
+ {!selectedRepo && ( +
+ +

Please select a repository to run a coverage scan

+
+ )} + {coverageError && ( +
+
+ +
+

Scan Failed

+

{coverageError}

+
+
+
+ )} + {success && ( +
+
+ +
+

Success!

+

{success}

+
+
+
+ )} + {jobId && jobStatus && jobStatus !== 'completed' && jobStatus !== 'failed' && ( +
+
+ +
+

+ Coverage scan in progress +

+
+
+ Job ID: {jobId} + Status: {jobStatus} +
+

+ Large repositories may take several minutes. You can leave this page and check results later. +

+
+
+
+
+ )} + {coverageResult && ( +
+
+
+ Total Coverage + {coverageResult.total_coverage.toFixed(2)}% +
+
+ Files Scanned + {coverageResult.files?.length ?? 0} +
+
+ {coverageResult.files && coverageResult.files.length > 0 && ( + + )} +
+
+ + { + const fileQuery = e.target.value.toLowerCase(); + }} + /> +
+
+ + + + + + + + + {(coverageResult.files ?? []).map(f => ( + + + + + ))} + +
FileCoverage
{f.file} + {f.coverage.toFixed(1)}% +
+
+
+ +
+ )} + {loadingCoverage && !coverageResult && ( +
+
+
+ )} +
+ )} + {activeTab === 'history' && ( +
+ {selectedRepo ? ( + <> +
+

+ + Coverage History +

+
+ + +
+
+ {historyError ? ( +
+ + {historyError} +
+ ) : ( + <> + +
+ { + setCoverageResult({ + total_coverage: history.total_coverage, + files: history.files + }); + setActiveTab('scanner'); + }} + /> +
+ + )} + + ) : ( +
+

Select a repository to view coverage history

+
+ )} +
+ )} + {activeTab === 'branches' && ( +
+ {selectedRepo ? ( + { + setCompareBranch1(b1); + setCompareBranch2(b2); + setActiveTab('compare'); + }} + /> + ) : ( +
+

Select a repository to view branch coverage

+
+ )} +
+ )} + {activeTab === 'compare' && ( +
+ {selectedRepo ? ( + + ) : ( +
+

Select a repository to compare branches

+
+ )} +
+ )} +
+ )} +
+ ); +}; + +export default CoverageTab; diff --git a/poc-frontend/src/app/repositories/layout.tsx b/poc-frontend/src/app/repositories/layout.tsx new file mode 100644 index 0000000..c86ce6e --- /dev/null +++ b/poc-frontend/src/app/repositories/layout.tsx @@ -0,0 +1,9 @@ +import React, { ReactNode } from 'react'; + +interface RepositoriesLayoutProps { + children: ReactNode; +} + +export default function RepositoriesLayout({ children }: RepositoriesLayoutProps) { + return children; +} diff --git a/poc-frontend/src/app/repositories/page.tsx b/poc-frontend/src/app/repositories/page.tsx new file mode 100644 index 0000000..0c38c95 --- /dev/null +++ b/poc-frontend/src/app/repositories/page.tsx @@ -0,0 +1,702 @@ +'use client'; +import React, { useState, useEffect, useRef, useCallback } from "react"; +import { Repository } from '@/types/repository'; +import PageSkeleton from "@/components/PageSkeleton"; +import withAuth from "@/components/withAuth"; +import { getUserRepositories, refreshRepositories } from "@/services/api"; +import { AlertCircle, Folder, ChevronLeft, ChevronRight, LayoutGrid, LayoutList, ChevronDown, BarChart2, RefreshCw } from 'lucide-react'; +import CoverageTab from './coverage-tab'; + + +const getLanguageColor = (language: string): string => { + const colors: Record = { + JavaScript: '#f1e05a', + TypeScript: '#2b7489', + Python: '#3572A5', + Java: '#b07219', + HTML: '#e34c26', + CSS: '#563d7c', + PHP: '#4F5D95', + Ruby: '#701516', + Go: '#00ADD8', + C: '#555555', + 'C++': '#f34b7d', + 'C#': '#178600', + Swift: '#ffac45', + Kotlin: '#F18E33', + Rust: '#dea584', + Dart: '#00B4AB', + Shell: '#89e051', + Scala: '#c22d40', + Solidity: '#AA6746', + Move: '#4bc1d2', + default: '#8f8f8f' + }; + return colors[language] || colors.default; +}; + + +interface LanguageBarProps { + languages?: Record; +} + +const LanguageBar: React.FC = ({ languages }) => { + if (!languages || Object.keys(languages).length === 0) return null; + + const sortedLanguages = 
Object.entries(languages) + .sort(([, percentA], [, percentB]) => Number(percentB) - Number(percentA)) + .slice(0, 4); // Show top 4 languages + + return ( +
+
Languages
+
+ {sortedLanguages.map(([lang, percent]) => ( +
+ ))} +
+
+ {sortedLanguages.map(([lang, percent]) => ( +
+
+ {lang} {percent.toFixed(1)}% +
+ ))} +
+
+ ); +}; + +const RepositoriesPage = () => { + const [repositories, setRepositories] = useState([]); + const [loading, setLoading] = useState(true); + const [loadingMore, setLoadingMore] = useState(false); + const [error, setError] = useState(null); + const [viewMode, setViewMode] = useState<'grid' | 'list'>('grid'); + const [activeTab, setActiveTab] = useState<'repositories' | 'coverage'>('repositories'); + const hasFetchedRef = useRef(false); + + const [pagination, setPagination] = useState({ + skip: 0, + limit: 10, + totalCount: 0, + currentPage: 1, + pageSize: 10, + }); + + // Add refs to track search state + const prevSearchRef = useRef(''); + const searchTimeoutRef = useRef(null); + const isLoadingRef = useRef(false); + const [isRefreshing, setIsRefreshing] = useState(false); + const [dataSource, setDataSource] = useState<'database' | 'github'>('database'); + + const fetchRepositories = async (skip = 0, limit = pagination.pageSize, append = false, search = '') => { + // Prevent duplicate requests when already loading + if (isLoadingRef.current) { + return; + } + + try { + if (append) { + setLoadingMore(true); + } else { + setLoading(true); + } + isLoadingRef.current = true; + + const response = await getUserRepositories(skip, limit, search); + const { repositories: fetchedRepos, totalCount, source } = response.data; + + if (append) { + setRepositories(prev => [...prev, ...fetchedRepos]); + } else { + setRepositories(fetchedRepos); + } + + setPagination(prev => ({ + ...prev, + skip, + limit, + totalCount, + currentPage: Math.floor(skip / limit) + 1 + })); + + setDataSource(source || 'database'); + setError(null); + } catch (err: any) { + console.error('Error fetching repositories:', err); + setError(err.response?.data?.error || 'Failed to fetch repositories. 
Please ensure your GitHub connection is working.'); + } finally { + setLoading(false); + setLoadingMore(false); + isLoadingRef.current = false; + } + }; + + // New function to handle complete repository refresh + const handleCompleteRefresh = async () => { + if (isRefreshing) return; + + try { + setIsRefreshing(true); + setError(null); + console.log('Starting complete repository refresh...'); + + // Pass higher limit to force using the force-refresh endpoint + const response = await refreshRepositories(0, 100, prevSearchRef.current); + console.log('Repository refresh response:', response.data); + + const { repositories: fetchedRepos, totalCount, source } = response.data; + console.log(`Received ${fetchedRepos?.length || 0} repositories from ${source}, total count: ${totalCount}`); + + if (fetchedRepos && fetchedRepos.length > 0) { + setRepositories(fetchedRepos); + setPagination(prev => ({ + ...prev, + skip: 0, + totalCount: totalCount || fetchedRepos.length, + currentPage: 1 + })); + + setDataSource(source || 'github'); + } else { + console.log('No repositories returned from refresh, will retry normal fetch'); + // Fallback to regular fetch if the force refresh doesn't return repos directly + await fetchRepositories(0, pagination.pageSize); + } + } catch (err: any) { + console.error('Error refreshing repositories:', err); + setError(err.response?.data?.error || + 'Failed to refresh repositories from GitHub. 
Please try again later.'); + } finally { + setIsRefreshing(false); + console.log('Repository refresh process completed'); + } + }; + + // Replace the handleRefreshRepositories function with this updated version + const handleRefreshRepositories = async () => { + // For users with many repositories, use the complete refresh + if (pagination.totalCount > 90) { + await handleCompleteRefresh(); + return; + } + + // Otherwise use the existing refresh logic + if (isRefreshing) return; + + try { + setIsRefreshing(true); + console.log('Starting repository refresh...'); + + const response = await refreshRepositories(0, pagination.pageSize, prevSearchRef.current); + console.log('Repository refresh response:', response.data); + + const { repositories: fetchedRepos, totalCount, source } = response.data; + console.log(`Received ${fetchedRepos?.length || 0} repositories from ${source}, total count: ${totalCount}`); + + setRepositories(fetchedRepos || []); + setPagination(prev => ({ + ...prev, + skip: 0, + totalCount: totalCount || 0, + currentPage: 1 + })); + + setDataSource(source || 'github'); + setError(null); + } catch (err: any) { + console.error('Error refreshing repositories:', err); + setError(err.response?.data?.error || + 'Failed to refresh repositories from GitHub. 
GitHub API rate limit might be exceeded.'); + } finally { + setIsRefreshing(false); + console.log('Repository refresh process completed'); + } + }; + + const handleSearch = useCallback((query: string) => { + // Skip if the query hasn't changed + if (query === prevSearchRef.current) { + return; + } + + prevSearchRef.current = query; + + // Clear any pending search + if (searchTimeoutRef.current) { + clearTimeout(searchTimeoutRef.current); + } + + // Set loading state immediately + if (query.trim().length >= 3) { + setLoading(true); + } + + // Use our debounced function instead of setTimeout + searchTimeoutRef.current = setTimeout(() => { + if (query.trim().length < 3 && repositories.length > 0) { + // Simple client-side filtering for short queries + const filtered = repositories.filter(repo => + repo.name.toLowerCase().includes(query.toLowerCase()) + ); + setRepositories(filtered); + setLoading(false); + } else { + // API search for longer queries + fetchRepositories(0, pagination.pageSize, false, query); + } + }, 500); + + }, [repositories, pagination.pageSize]); + + useEffect(() => { + if (!hasFetchedRef.current) { + hasFetchedRef.current = true; + fetchRepositories(0, pagination.pageSize); + } + + // Cleanup function to clear any pending searches + return () => { + if (searchTimeoutRef.current) { + clearTimeout(searchTimeoutRef.current); + } + }; + }, []); + + const handleLoadMore = async () => { + const newSkip = pagination.skip + pagination.limit; + if (newSkip < pagination.totalCount) { + await fetchRepositories(newSkip, pagination.pageSize, true, prevSearchRef.current); + } + }; + + const handlePageChange = async (newPage: number) => { + const newSkip = (newPage - 1) * pagination.pageSize; + await fetchRepositories(newSkip, pagination.pageSize, false, prevSearchRef.current); + }; + + const totalPages = Math.ceil(pagination.totalCount / pagination.pageSize); + + const PaginationControls = () => { + const pages = []; + const maxPagesToShow = 5; + + let 
startPage = Math.max(1, pagination.currentPage - Math.floor(maxPagesToShow / 2)); + let endPage = Math.min(totalPages, startPage + maxPagesToShow - 1); + + if (endPage - startPage + 1 < maxPagesToShow) { + startPage = Math.max(1, endPage - maxPagesToShow + 1); + } + + for (let i = startPage; i <= endPage; i++) { + pages.push(i); + } + + return ( +
+ + + + + {pages.map(page => ( + + ))} + + + + + + + Page {pagination.currentPage} of {totalPages} ({pagination.totalCount} repositories) + +
+ ); + }; + + return ( + +
+ {/* Tab controls */} +
+ + +
+ + {/* Active tab content */} + {activeTab === 'repositories' ? ( + <> +
+ {/* Search input */} +
+ handleSearch(e.target.value)} + /> +
+ + {/* Refresh button */} + +
+ + {/* Data source indicator */} + {!loading && !error && repositories.length > 0 && ( +
+
+ +
+ +
+ + +
+
+ )} + + {loading ? ( + viewMode === 'grid' ? ( +
+ {[...Array(6)].map((_, index) => ( +
+
+
+
+
+
+
+
+
+
+
+
+
+
+ ))} +
+ ) : ( +
+ + + + + + + + + + + + + + {[...Array(5)].map((_, index) => ( + + + + + + + + + + ))} + +
RepositoryDescriptionLanguagesCreatedUpdatedVisibilityActions
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ ) + ) : error ? ( +
+ + {error} +
+ ) : repositories.length === 0 ? ( +
+ +

No repositories found

+

Connect your GitHub account to see your repositories here

+
+ ) : viewMode === 'grid' ? ( + <> +
+ {repositories.map((repo) => ( +
+
+

{repo.name}

+ + {repo.private ? 'Private' : 'Public'} + +
+

+ {repo.description || 'No description provided'} +

+ + {/* Add language bar */} + + +
+ + Created: + {formatDate(repo.created_at)} + + + Updated: + {formatDate(repo.updated_at)} + +
+ +
+ ))} +
+ + {/* Improved Show more button for grid view */} + {repositories.length < pagination.totalCount && ( +
+ +
+ )} + + ) : ( + <> +
+ + + + + + + + + + + + + + {repositories.map((repo) => ( + + + + + + + + + + ))} + +
RepositoryDescriptionLanguagesCreatedUpdatedVisibilityActions
+ {repo.name} + +
+ {repo.description || 'No description provided'} +
+
+ {repo.languages && Object.keys(repo.languages).length > 0 ? ( +
+
+ {Object.entries(repo.languages) + .sort(([, percentA], [, percentB]) => Number(percentB) - Number(percentA)) + .slice(0, 4) + .map(([lang, percent]) => ( +
+ ))} +
+ + {Object.keys(repo.languages)[0]} + +
+ ) : ( + - + )} +
+ {formatDate(repo.created_at)} + + {formatDate(repo.updated_at)} + + + {repo.private ? 'Private' : 'Public'} + + + + View on GitHub + +
+
+ + {/* Pagination controls for list view */} + {pagination.totalCount > pagination.pageSize && ( + + )} + + )} + + ) : ( + /* Coverage tab content */ + + )} +
+
+ ); +}; + +// Helper function to format dates +function formatDate(dateString: string | undefined) { + if (!dateString) return 'N/A'; + + try { + // Parse the date string + const date = new Date(dateString); + + // Check if date is valid + if (isNaN(date.getTime())) { + console.log('Invalid date:', dateString); + return 'N/A'; + } + + // Format the date nicely + return new Intl.DateTimeFormat('en-US', { + day: '2-digit', + month: 'short', + year: 'numeric', + hour: '2-digit', + minute: '2-digit', + hour12: true + }).format(date); + } catch (e) { + console.error('Error formatting date:', e); + return 'N/A'; + } +} + +export default withAuth(RepositoriesPage); diff --git a/poc-frontend/src/app/settings/layout.tsx b/poc-frontend/src/app/settings/layout.tsx new file mode 100644 index 0000000..f71bd08 --- /dev/null +++ b/poc-frontend/src/app/settings/layout.tsx @@ -0,0 +1,9 @@ +import React, { ReactNode } from 'react'; + +interface SettingsLayoutProps { + children: ReactNode; +} + +export default function SettingsLayout({ children }: SettingsLayoutProps) { + return children; +} diff --git a/poc-frontend/src/app/settings/page.tsx b/poc-frontend/src/app/settings/page.tsx new file mode 100644 index 0000000..05797ee --- /dev/null +++ b/poc-frontend/src/app/settings/page.tsx @@ -0,0 +1,201 @@ +'use client'; +import React, { useState, useEffect } from "react"; +import PageSkeleton from "@/components/PageSkeleton"; +import withAuth from "@/components/withAuth"; +import { getUserProfile } from "@/services/api"; +import { Loader2, Save, AlertCircle, User } from 'lucide-react'; +import Image from 'next/image'; + +const SettingsPage = () => { + const [user, setUser] = useState(null); + const [loading, setLoading] = useState(true); + const [error, setError] = useState(null); + const [isSaving, setIsSaving] = useState(false); + const [formData, setFormData] = useState({ + displayName: '', + email: '', + notificationEmail: true, + notificationSlack: false + }); + + useEffect(() => { 
+ const fetchUserProfile = async () => { + try { + setLoading(true); + const response = await getUserProfile(); + const userData = response.data.user || {}; + setUser(userData); + + // Initialize form with user data + setFormData({ + displayName: userData.name || '', + email: userData.email || '', + notificationEmail: true, + notificationSlack: false + }); + + setError(null); + } catch (err: any) { + console.error('Error fetching user profile:', err); + setError(err.response?.data?.error || 'Failed to fetch user profile'); + } finally { + setLoading(false); + } + }; + + fetchUserProfile(); + }, []); + + const handleFormChange = (e: React.ChangeEvent) => { + const { name, value, type, checked } = e.target; + + setFormData(prev => ({ + ...prev, + [name]: type === 'checkbox' ? checked : value + })); + }; + + const handleSubmit = async (e: React.FormEvent) => { + e.preventDefault(); + setIsSaving(true); + + // Simulate API call + setTimeout(() => { + // In a real application, you would make an API call to update user settings + setIsSaving(false); + alert('Settings saved successfully'); + }, 1000); + }; + + return ( + +
+ {loading ? ( +
+ +
+ ) : error ? ( +
+ + {error} +
+ ) : ( +
+ {/* Profile Section with Avatar */} +
+
+ {user && user.avatar_url ? ( + Profile + ) : ( +
+ +
+ )} +
+ +
+

{user?.name || 'User'}

+

{user?.email || 'No email available'}

+
+ GitHub User +
+
+
+ +
+
+

Profile Information

+
+
+ + +
+
+ + +

Email is managed by GitHub

+
+
+
+ + {/* Notification Preferences */} +
+

Notification Preferences

+
+
+ + +
+
+ + +
+
+
+ +
+ +
+
+
+ )} +
+
+ ); +}; + +export default withAuth(SettingsPage); diff --git a/poc-frontend/src/components/ActivityGraph.tsx b/poc-frontend/src/components/ActivityGraph.tsx new file mode 100644 index 0000000..00766bd --- /dev/null +++ b/poc-frontend/src/components/ActivityGraph.tsx @@ -0,0 +1,170 @@ +'use client'; +import React from 'react'; +import { Tooltip } from 'react-tooltip'; + +interface DailyActivity { + date: string; + count: number; + level: number; +} + +interface ActivityGraphProps { + activities: DailyActivity[]; + totalCount?: number; +} + +const ActivityGraph: React.FC = ({ activities, totalCount = 0 }) => { + const getMonths = () => { + const months: string[] = []; + const now = new Date(); + for (let i = 11; i >= 0; i--) { + const month = new Date(now); + month.setMonth(now.getMonth() - i); + const monthName = month.toLocaleString('default', { month: 'short' }); + months.push(monthName); + } + return months; + }; + + // Group activities by week + const getWeeksArray = () => { + // Create a map of all dates in the past year + const dateMap = new Map(); + const today = new Date(); + const oneYearAgo = new Date(); + oneYearAgo.setDate(today.getDate() - 365); + + // Initialize with empty activity (level 0) for all days + let currentDate = new Date(oneYearAgo); + while (currentDate <= today) { + const dateString = currentDate.toISOString().split('T')[0]; + dateMap.set(dateString, { date: dateString, count: 0, level: 0 }); + + // Move to next day + currentDate.setDate(currentDate.getDate() + 1); + } + + // Fill in actual activity data + activities.forEach(activity => { + dateMap.set(activity.date, activity); + }); + + // Group by week (7 days per row) + const weeks: DailyActivity[][] = []; + let week: DailyActivity[] = []; + + // Start from Sunday of the week that includes oneYearAgo + let startDay = new Date(oneYearAgo); + startDay.setDate(startDay.getDate() - startDay.getDay()); + + currentDate = new Date(startDay); + while (currentDate <= today) { + const 
dateString = currentDate.toISOString().split('T')[0]; + + // If it's the start of a new week (Sunday), create a new week array + if (currentDate.getDay() === 0 && week.length > 0) { + weeks.push([...week]); + week = []; + } + + // Add this day's activity to the current week + if (dateMap.has(dateString)) { + week.push(dateMap.get(dateString)!); + } else { + // If we don't have data for this day, add empty activity + week.push({ date: dateString, count: 0, level: 0 }); + } + + // Move to next day + currentDate.setDate(currentDate.getDate() + 1); + } + + // Add the last week if not empty + if (week.length > 0) { + weeks.push(week); + } + + return weeks; + }; + + const months = getMonths(); + const weeks = getWeeksArray(); + + const levelToColor = (level: number): string => { + switch (level) { + case 0: return 'bg-[#1F2B39] border border-[#263544]'; // Empty/no activity + case 1: return 'bg-[#FF7D2D]/20'; + case 2: return 'bg-[#FF7D2D]/40'; + case 3: return 'bg-[#FF7D2D]/70'; + case 4: return 'bg-[#FF7D2D]'; + default: return 'bg-[#1F2B39] border border-[#263544]'; + } + }; + + const formatDate = (dateString: string): string => { + const date = new Date(dateString); + return date.toLocaleDateString('en-US', { + weekday: 'long', + year: 'numeric', + month: 'long', + day: 'numeric' + }); + }; + + return ( + <> +
+ {/* Month labels row */} +
+ {months.map((month, index) => ( +
{month}
+ ))} +
+ + {/* Day of week labels column */} +
+
+ Sun + Mon + Tue + Wed + Thu + Fri + Sat +
+ + {/* Activity grid */} +
+ {weeks.map((week, weekIndex) => ( + + {week.map((day, dayIndex) => ( +
+ ))} + + ))} +
+
+ + {/* Legend */} +
+ Less +
+
+
+
+
+ More +
+
+ + + + ); +}; + +export default ActivityGraph; diff --git a/poc-frontend/src/components/ActivityList.tsx b/poc-frontend/src/components/ActivityList.tsx new file mode 100644 index 0000000..47e7d2e --- /dev/null +++ b/poc-frontend/src/components/ActivityList.tsx @@ -0,0 +1,85 @@ +'use client'; +import React from 'react'; +import { GitCommit, CheckCircle, GitPullRequest, Clock } from 'lucide-react'; + +interface Activity { + id: string; + type: string; + repoName: string; + message: string; + timestamp: string | Date; +} + +interface ActivityListProps { + activities: Activity[]; +} + +const ActivityList: React.FC = ({ activities }) => { + const getActivityIcon = (type: string) => { + switch (type) { + case 'commit': + return ; + case 'test': + return ; + case 'pull_request': + return ; + default: + return ; + } + }; + + const formatDate = (timestamp: string | Date): string => { + const date = new Date(timestamp); + const now = new Date(); + const diffTime = Math.abs(now.getTime() - date.getTime()); + const diffHours = Math.floor(diffTime / (1000 * 60 * 60)); + const diffDays = Math.floor(diffTime / (1000 * 60 * 60 * 24)); + + if (diffHours < 1) { + return 'Just now'; + } else if (diffHours < 24) { + return `${diffHours} hours ago`; + } else if (diffDays === 1) { + return 'Yesterday'; + } else { + return `${diffDays} days ago`; + } + }; + + return ( +
+
+

+ + Recent Activity +

+
+ + {activities.length === 0 ? ( +
+ No recent activities found +
+ ) : ( +
+ {activities.map((activity) => ( +
+
{getActivityIcon(activity.type)}
+
+
+ {activity.repoName} + {formatDate(activity.timestamp)} +
+

{activity.message}

+
+
+ ))} +
+ )} +
+ ); +}; + +export default ActivityList; diff --git a/poc-frontend/src/components/CoverageVisualizations/BranchComparison.tsx b/poc-frontend/src/components/CoverageVisualizations/BranchComparison.tsx new file mode 100644 index 0000000..78d524f --- /dev/null +++ b/poc-frontend/src/components/CoverageVisualizations/BranchComparison.tsx @@ -0,0 +1,227 @@ +import React, { useState, useEffect } from 'react'; +import { compareBranchCoverage } from '@/services/api'; +import { BranchCompareResult, FileDiff } from '@/types/coverage'; +import FileHeatmap from './FileHeatmap'; +import { ArrowUp, ArrowDown, Minus, AlertCircle } from 'lucide-react'; + +interface BranchComparisonProps { + repository: string; + defaultBranch1?: string; + defaultBranch2?: string; +} + +export const BranchComparison: React.FC = ({ + repository, + defaultBranch1 = 'main', + defaultBranch2 = 'develop', +}) => { + const [branch1, setBranch1] = useState(defaultBranch1); + const [branch2, setBranch2] = useState(defaultBranch2); + const [compareResult, setCompareResult] = useState(null); + const [isLoading, setIsLoading] = useState(false); + const [error, setError] = useState(null); + const [availableBranches, setAvailableBranches] = useState([defaultBranch1, defaultBranch2]); + + useEffect(() => { + const fetchBranches = async () => { + try { + setAvailableBranches(['main', 'develop', 'feature/coverage', 'bugfix/tests']); + } catch (err) { + console.error('Error fetching branches:', err); + } + }; + fetchBranches(); + }, [repository]); + + const handleCompare = async () => { + if (branch1 === branch2) { + setError('Please select two different branches to compare'); + return; + } + setIsLoading(true); + setError(null); + try { + const response = await compareBranchCoverage(repository, branch1, branch2); + setCompareResult(response.data); + } catch (err) { + console.error('Error comparing branches:', err); + setError('Failed to compare branches. 
Please try again.'); + } finally { + setIsLoading(false); + } + }; + + const getColorForDiff = (diff: number) => { + if (diff > 5) return 'text-green-500'; + if (diff > 0) return 'text-green-400'; + if (diff < -5) return 'text-red-500'; + if (diff < 0) return 'text-red-400'; + return 'text-gray-500'; + }; + + const getDiffIcon = (diff: number) => { + if (diff > 0) return ; + if (diff < 0) return ; + return ; + }; + + const renderFileDiffs = () => { + if (!compareResult?.file_diffs?.length) return null; + const sortedDiffs = [...compareResult.file_diffs].sort((a, b) => { + return Math.abs(b.diff) - Math.abs(a.diff); + }); + return ( +
+

File Coverage Differences

+
+ + + + + + + + + + + {sortedDiffs.slice(0, 50).map((diff, index) => ( + + + + + + + ))} + +
File{branch1} (%){branch2} (%)Difference
+ {diff.file} + + {diff.branch1.toFixed(1)}% + + {diff.branch2.toFixed(1)}% + + {diff.diff > 0 ? '+' : ''}{diff.diff.toFixed(1)}% +
+
+ {compareResult.file_diffs.length > 50 && ( +
+ Showing 50 of {compareResult.file_diffs.length} files with the largest differences. +
+ )} +
+ ); + }; + + return ( +
+

Branch Coverage Comparison

+
+
+ + +
+
+ + +
+
+ +
+
+ {error && ( +
+ + {error} +
+ )} + {isLoading && ( +
+
+
+ )} + {compareResult && !isLoading && ( +
+
+
+

Coverage Summary

+
+
+

Branch 1: {compareResult.branch1}

+

{compareResult.coverage1.toFixed(1)}%

+

+ {new Date(compareResult.branch1_date).toLocaleDateString()} +

+ {compareResult.branch1_commit && ( +

+ {compareResult.branch1_commit.substring(0, 7)} +

+ )} +
+
+

Branch 2: {compareResult.branch2}

+

{compareResult.coverage2.toFixed(1)}%

+

+ {new Date(compareResult.branch2_date).toLocaleDateString()} +

+ {compareResult.branch2_commit && ( +

+ {compareResult.branch2_commit.substring(0, 7)} +

+ )} +
+
+
+ {getDiffIcon(compareResult.coverage_diff)} + + {compareResult.coverage_diff > 0 && '+'} + {compareResult.coverage_diff.toFixed(1)}% + {compareResult.diff_label === 'better' && ' improvement'} + {compareResult.diff_label === 'worse' && ' decline'} + {compareResult.diff_label === 'same' && ' no change'} + +
+
+
+

Coverage Distribution

+ ({ + file: f.file, + coverage: f.branch2 + }))} /> +
+
+ {renderFileDiffs()} +
+ )} +
+ ); +}; + +export default BranchComparison; diff --git a/poc-frontend/src/components/CoverageVisualizations/BranchCoverageList.tsx b/poc-frontend/src/components/CoverageVisualizations/BranchCoverageList.tsx new file mode 100644 index 0000000..356b7b5 --- /dev/null +++ b/poc-frontend/src/components/CoverageVisualizations/BranchCoverageList.tsx @@ -0,0 +1,266 @@ +import React, { useState, useEffect } from 'react'; +import { getBranchCoverage, scanMultipleBranches } from '@/services/api'; +import { BranchCoverage, MultiBranchScanResult } from '@/types/coverage'; +import { AlertCircle, Check, AlertTriangle, Clock } from 'lucide-react'; + +interface BranchCoverageListProps { + repository: string; + onBranchSelect?: (branch1: string, branch2: string) => void; +} + +export const BranchCoverageList: React.FC = ({ + repository, + onBranchSelect +}) => { + const [branches, setBranches] = useState([]); + const [loading, setLoading] = useState(false); + const [error, setError] = useState(null); + const [isScanning, setIsScanning] = useState(false); + const [scanResult, setScanResult] = useState(null); + const [availableBranches, setAvailableBranches] = useState([]); + const [selectedBranches, setSelectedBranches] = useState([]); + + useEffect(() => { + if (!repository) return; + + fetchBranchCoverage(); + setAvailableBranches(['main', 'develop', 'feature/coverage', 'bugfix/tests']); + }, [repository]); + + const fetchBranchCoverage = async () => { + try { + setLoading(true); + setError(null); + const response = await getBranchCoverage(repository); + setBranches(response.data); + } catch (err) { + console.error('Error fetching branch coverage:', err); + setError('Failed to load branch coverage data'); + } finally { + setLoading(false); + } + }; + + const handleScanBranches = async () => { + if (selectedBranches.length === 0) { + setError('Please select at least one branch to scan'); + return; + } + + try { + setIsScanning(true); + setError(null); + const response = await 
scanMultipleBranches(repository, selectedBranches); + setScanResult(response.data); + + await fetchBranchCoverage(); + } catch (err) { + console.error('Error scanning branches:', err); + setError('Failed to scan branches'); + } finally { + setIsScanning(false); + } + }; + + const toggleBranchSelection = (branch: string) => { + if (selectedBranches.includes(branch)) { + setSelectedBranches(selectedBranches.filter(b => b !== branch)); + } else { + setSelectedBranches([...selectedBranches, branch]); + } + }; + + const getStatusColor = (status: string) => { + switch (status) { + case 'success': return 'text-green-500'; + case 'failed': return 'text-red-500'; + case 'pending': return 'text-yellow-500'; + case 'timeout': return 'text-orange-500'; + default: return 'text-gray-500'; + } + }; + + const getStatusIcon = (status: string) => { + switch (status) { + case 'success': return ; + case 'failed': return ; + case 'pending': return ; + case 'timeout': return ; + default: return null; + } + }; + + const formatDate = (isoDate: string) => { + try { + return new Date(isoDate).toLocaleString(); + } catch (e) { + return isoDate; + } + }; + + const formatBranchName = (branch: string) => { + if (branch.length > 30) { + return branch.substring(0, 27) + '...'; + } + return branch; + }; + + return ( +
+
+

Branch Coverage

+
+ +
+
+ + {error && ( +
+ + {error} +
+ )} + +
+

Scan Branches

+ +
+ {availableBranches.map(branch => ( + + ))} +
+ + +
+ + {scanResult && ( +
+

Scan Results

+
+ Successfully scanned {scanResult.successful} of {scanResult.total_scanned} branches +
+ +
+ + + + + + + + + + {scanResult.branches.map((branch, index) => ( + + + + + + ))} + +
BranchStatusCoverage
+ {formatBranchName(branch.branch)} + + {getStatusIcon(branch.status)} + {branch.status} + {branch.error && ( + + {branch.error.length > 30 ? branch.error.substring(0, 27) + '...' : branch.error} + + )} + + {branch.coverage !== undefined ? `${branch.coverage.toFixed(1)}%` : '-'} +
+
+
+ )} + +
+

Coverage by Branch

+ + {loading ? ( +
+
+
+ ) : branches.length === 0 ? ( +
+

No branch coverage data available. Scan branches to generate coverage reports.

+
+ ) : ( +
+ + + + + + + + + + + {branches.map((branch, index) => ( + + + + + + + ))} + +
BranchCoverageLast ScannedActions
+ {formatBranchName(branch.branch)} + {branch.commit_hash && ( + + {branch.commit_hash.substring(0, 7)} + + )} + + {branch.total_coverage.toFixed(1)}% + + {formatDate(branch.timestamp)} + + {onBranchSelect && branches.length > 1 && ( +
+ + +
+ )} +
+
+ )} +
+
+ ); +}; + +export default BranchCoverageList; diff --git a/poc-frontend/src/components/CoverageVisualizations/CoverageCard.tsx b/poc-frontend/src/components/CoverageVisualizations/CoverageCard.tsx new file mode 100644 index 0000000..c91bece --- /dev/null +++ b/poc-frontend/src/components/CoverageVisualizations/CoverageCard.tsx @@ -0,0 +1,74 @@ +'use client'; + +import React from 'react'; +import { CoverageHistory } from '@/types/coverage'; +import { CalendarDays, GitBranch, ArrowRight } from 'lucide-react'; + +interface CoverageCardProps { + coverageData: CoverageHistory; + onClick?: () => void; +} + +const CoverageCard: React.FC = ({ coverageData, onClick }) => { + const formatRepoName = (repoUrl: string): string => { + if (!repoUrl) return 'Unknown Repository'; + const parts = repoUrl.split('/'); + return parts[parts.length - 1].replace('.git', ''); + }; + + const formatDate = (dateString: string): string => { + try { + const date = new Date(dateString); + return date.toLocaleString('en-US', { + day: '2-digit', + month: 'short', + year: 'numeric', + hour: '2-digit', + minute: '2-digit' + }); + } catch (e) { + return dateString; + } + }; + + const getCoverageColorClass = (coverage: number): string => { + if (coverage >= 80) return 'text-green-500'; + if (coverage >= 60) return 'text-green-600'; + if (coverage >= 40) return 'text-yellow-500'; + if (coverage >= 20) return 'text-orange-500'; + return 'text-red-500'; + }; + + return ( +
+
+

+ {formatRepoName(coverageData.repository)} +

+ + {coverageData.total_coverage.toFixed(1)}% + +
+ +
+
+ + {coverageData.branch || 'default'} +
+
+ + {formatDate(coverageData.timestamp)} +
+
+ +
+ View Details +
+
+ ); +}; + +export default CoverageCard; diff --git a/poc-frontend/src/components/CoverageVisualizations/CoverageHistoryChart.tsx b/poc-frontend/src/components/CoverageVisualizations/CoverageHistoryChart.tsx new file mode 100644 index 0000000..f4caace --- /dev/null +++ b/poc-frontend/src/components/CoverageVisualizations/CoverageHistoryChart.tsx @@ -0,0 +1,69 @@ +'use client'; + +import React from 'react'; +import { CoverageTrend } from '@/types/coverage'; +import { LineChart, Line, XAxis, YAxis, CartesianGrid, Tooltip, ResponsiveContainer } from 'recharts'; + +interface CoverageHistoryChartProps { + data: CoverageTrend[]; + height?: number; +} + +const CoverageHistoryChart: React.FC = ({ + data, + height = 300 +}) => { + if (!data || data.length === 0) { + return ( +
+

No historical data available

+
+ ); + } + + const formattedData = data.map(item => ({ + ...item, + formattedDate: new Date(item.date).toLocaleDateString() + })); + + return ( +
+

Coverage History

+ + + + + `${value}%`} + /> + [`${value}%`, 'Coverage']} + labelFormatter={(label) => `Date: ${label}`} + /> + + + +
+ ); +}; + +export default CoverageHistoryChart; diff --git a/poc-frontend/src/components/CoverageVisualizations/CoverageHistoryList.tsx b/poc-frontend/src/components/CoverageVisualizations/CoverageHistoryList.tsx new file mode 100644 index 0000000..cb61fd2 --- /dev/null +++ b/poc-frontend/src/components/CoverageVisualizations/CoverageHistoryList.tsx @@ -0,0 +1,144 @@ +'use client'; + +import React, { useState, useEffect } from 'react'; +import { CoverageHistory } from '@/types/coverage'; +import { Search, GitBranch, Calendar, Code } from 'lucide-react'; + +interface CoverageHistoryListProps { + coverageHistory: CoverageHistory[]; + onSelectHistory: (history: CoverageHistory) => void; +} + +const CoverageHistoryList: React.FC = ({ + coverageHistory, + onSelectHistory +}) => { + const [searchQuery, setSearchQuery] = useState(''); + const [filteredHistory, setFilteredHistory] = useState(coverageHistory); + + useEffect(() => { + if (!searchQuery.trim()) { + setFilteredHistory(coverageHistory); + } else { + const query = searchQuery.toLowerCase(); + const filtered = coverageHistory.filter(item => + (item.branch && item.branch.toLowerCase().includes(query)) || + (item.commit_hash && item.commit_hash.toLowerCase().includes(query)) + ); + setFilteredHistory(filtered); + } + }, [searchQuery, coverageHistory]); + + const formatDate = (dateString: string) => { + try { + const date = new Date(dateString); + return date.toLocaleString('en-US', { + day: '2-digit', + month: 'short', + year: 'numeric', + hour: '2-digit', + minute: '2-digit' + }); + } catch (e) { + return dateString; + } + }; + + // Calculate coverage color class + const getCoverageColorClass = (coverage: number): string => { + if (coverage >= 80) return 'text-green-500'; + if (coverage >= 60) return 'text-green-600'; + if (coverage >= 40) return 'text-yellow-500'; + if (coverage >= 20) return 'text-orange-500'; + return 'text-red-500'; + }; + + return ( +
+
+

Coverage History

+
+
+ + setSearchQuery(e.target.value)} + /> +
+
+
+ + {filteredHistory.length === 0 ? ( +
+

+ {searchQuery ? 'No matching coverage history found' : 'No coverage history available'} +

+
+ ) : ( +
+ + + + + + + + + + + + {filteredHistory.map((history, index) => ( + + + + + + + + ))} + +
BranchCommitCoverageScannedActions
+
+ + {history.branch || 'default'} +
+
+ {history.commit_hash ? ( +
+ + {history.commit_hash.substring(0, 8)} +
+ ) : ( + '-' + )} +
+ {history.total_coverage.toFixed(1)}% + +
+ + {formatDate(history.timestamp)} +
+
+ +
+
+ )} + + {searchQuery && filteredHistory.length > 0 && ( +
+ Found {filteredHistory.length} {filteredHistory.length === 1 ? 'result' : 'results'} +
+ )} +
+ ); +}; + +export default CoverageHistoryList; diff --git a/poc-frontend/src/components/CoverageVisualizations/FileHeatmap.tsx b/poc-frontend/src/components/CoverageVisualizations/FileHeatmap.tsx new file mode 100644 index 0000000..9f9fd9b --- /dev/null +++ b/poc-frontend/src/components/CoverageVisualizations/FileHeatmap.tsx @@ -0,0 +1,116 @@ +'use client'; + +import React, { useMemo, useState, useEffect } from 'react'; +import { FileCoverage } from '@/types/coverage'; +import { Search } from 'lucide-react'; + +interface FileHeatmapProps { + files: FileCoverage[]; +} + +const FileHeatmap: React.FC = ({ files }) => { + const [searchQuery, setSearchQuery] = useState(''); + const [filteredFiles, setFilteredFiles] = useState([]); + + // Generate color based on coverage percentage + const getHeatmapColor = (coverage: number): string => { + if (coverage >= 80) return 'bg-green-500'; + if (coverage >= 60) return 'bg-green-600'; + if (coverage >= 40) return 'bg-yellow-500'; + if (coverage >= 20) return 'bg-orange-500'; + return 'bg-red-500'; + }; + + // Get file basename from path + const getFileName = (filePath: string): string => { + const parts = filePath.split('/'); + return parts[parts.length - 1]; + }; + + // Sort and filter files based on search query + useEffect(() => { + if (!files) { + setFilteredFiles([]); + return; + } + + let result = [...files]; + if (searchQuery) { + const query = searchQuery.toLowerCase(); + result = result.filter(file => + file.file.toLowerCase().includes(query) + ); + } + result.sort((a, b) => a.coverage - b.coverage); + setFilteredFiles(result); + }, [searchQuery, files]); + + if (!files || files.length === 0) { + return ( +
+

File Coverage Heatmap

+

No file coverage data available

+
+ ); + } + + return ( +
+
+

File Coverage Heatmap

+
+
+ + setSearchQuery(e.target.value)} + /> +
+
+
+ + {filteredFiles.length === 0 ? ( +

+ {searchQuery ? 'No matching files found' : 'No files to display'} +

+ ) : ( + <> +
+ {filteredFiles.map((file) => ( +
+
+
+
+
+ {getFileName(file.file)} +
+
+ {file.coverage.toFixed(1)}% +
+
+
+ {file.file} +
+
+
+ ))} +
+ + {searchQuery && ( +
+ Showing {filteredFiles.length} of {files.length} files +
+ )} + + )} +
+ ); +}; + +export default FileHeatmap; diff --git a/poc-frontend/src/components/CoverageVisualizations/index.ts b/poc-frontend/src/components/CoverageVisualizations/index.ts new file mode 100644 index 0000000..9e8d428 --- /dev/null +++ b/poc-frontend/src/components/CoverageVisualizations/index.ts @@ -0,0 +1,6 @@ +export { default as CoverageHistoryChart } from './CoverageHistoryChart'; +export { default as FileHeatmap } from './FileHeatmap'; +export { default as CoverageCard } from './CoverageCard'; +export { default as CoverageHistoryList } from './CoverageHistoryList'; +export { default as BranchComparison } from './BranchComparison'; +export { default as BranchCoverageList } from './BranchCoverageList'; diff --git a/poc-frontend/src/components/LogoutButton.tsx b/poc-frontend/src/components/LogoutButton.tsx new file mode 100644 index 0000000..7a05b3e --- /dev/null +++ b/poc-frontend/src/components/LogoutButton.tsx @@ -0,0 +1,29 @@ +'use client'; + +import React from 'react'; +import { signOut } from '@/services/auth'; + +interface LogoutButtonProps { + className?: string; + children?: React.ReactNode; +} + +const LogoutButton: React.FC = ({ + className = "text-white hover:text-gray-300", + children +}) => { + const handleLogout = () => { + signOut(); + }; + + return ( + + ); +}; + +export default LogoutButton; diff --git a/poc-frontend/src/components/PageSkeleton.tsx b/poc-frontend/src/components/PageSkeleton.tsx new file mode 100644 index 0000000..4a79f97 --- /dev/null +++ b/poc-frontend/src/components/PageSkeleton.tsx @@ -0,0 +1,120 @@ +'use client'; +import React, { ReactNode, useState, useEffect } from 'react'; +import { usePathname, useRouter } from 'next/navigation'; +import Sidebar from '@/components/Sidebar'; +import { UserCircle } from 'lucide-react'; +import Image from 'next/image'; +import { getUserProfile } from '@/services/api'; + +interface PageSkeletonProps { + children: ReactNode; + title: string; + subtitle?: string; +} + +const PageSkeleton: 
React.FC = ({ + children, + title, + subtitle +}) => { + const [sidebarCollapsed, setSidebarCollapsed] = useState(false); + const [activeTab, setActiveTab] = useState<'metrics' | 'repositories' | 'tests' | 'settings'>('metrics'); + const [userProfile, setUserProfile] = useState(null); + const [loading, setLoading] = useState(true); + + const pathname = usePathname(); + const router = useRouter(); + + // Set the active tab based on pathname + useEffect(() => { + if (pathname === '/dashboard') { + setActiveTab('metrics'); + } else if (pathname === '/repositories') { + setActiveTab('repositories'); + } else if (pathname === '/settings') { + setActiveTab('settings'); + } + }, [pathname]); + + useEffect(() => { + const fetchUserProfile = async () => { + try { + const response = await getUserProfile(); + if (response.data && response.data.user) { + setUserProfile(response.data.user); + } + } catch (error) { + console.error('Error fetching user profile:', error); + } finally { + setLoading(false); + } + }; + + fetchUserProfile(); + }, []); + + const toggleSidebar = () => { + setSidebarCollapsed(!sidebarCollapsed); + }; + + const handleTabChange = (tab: 'metrics' | 'repositories' | 'tests' | 'settings') => { + setActiveTab(tab); + switch (tab) { + case 'metrics': + router.push('/dashboard'); + break; + case 'repositories': + router.push('/repositories'); + break; + case 'settings': + router.push('/settings'); + break; + default: + break; + } + }; + + const mainContentClass = sidebarCollapsed + ? "ml-14 transition-all duration-300 ease-in-out" + : "ml-56 transition-all duration-300 ease-in-out"; + + return ( +
+ +
+
+
+

{title}

+ {subtitle && ( +

{subtitle}

+ )} +
+
+ {!loading && userProfile && userProfile.avatar_url ? ( +
+ Profile +
+ ) : ( + + )} +
+
+
+ {children} +
+
+
+ ); +}; + +export default PageSkeleton; diff --git a/poc-frontend/src/components/SearchableDropdown.tsx b/poc-frontend/src/components/SearchableDropdown.tsx new file mode 100644 index 0000000..3fc0114 --- /dev/null +++ b/poc-frontend/src/components/SearchableDropdown.tsx @@ -0,0 +1,164 @@ +import React, { useState, useEffect, useRef } from 'react'; +import { ChevronDown, Search, X, AlertCircle } from 'lucide-react'; + +interface Option { + value: string; + label: string; +} + +interface SearchableDropdownProps { + options: Option[]; + value: string; + onChange: (value: string) => void; + onSearch: (query: string) => void; + placeholder?: string; + searchPlaceholder?: string; + label?: string; + loading?: boolean; + error?: string | null; +} + +const SearchableDropdown: React.FC = ({ + options, + value, + onChange, + onSearch, + placeholder = 'Select an option', + searchPlaceholder = 'Search...', + label, + loading = false, + error = null, +}) => { + const [isOpen, setIsOpen] = useState(false); + const [searchText, setSearchText] = useState(''); + const dropdownRef = useRef(null); + const searchInputRef = useRef(null); + const [debouncedSearch, setDebouncedSearch] = useState(''); + const debounceTimerRef = useRef(null); + + // Handle click outside of dropdown + useEffect(() => { + const handleClickOutside = (event: MouseEvent) => { + if (dropdownRef.current && !dropdownRef.current.contains(event.target as Node)) { + setIsOpen(false); + } + }; + + document.addEventListener('mousedown', handleClickOutside); + return () => { + document.removeEventListener('mousedown', handleClickOutside); + }; + }, []); + + // Focus search input when dropdown opens + useEffect(() => { + if (isOpen) { + searchInputRef.current?.focus(); + } + }, [isOpen]); + + // Debounced search + useEffect(() => { + if (debounceTimerRef.current) { + clearTimeout(debounceTimerRef.current); + } + + debounceTimerRef.current = setTimeout(() => { + setDebouncedSearch(searchText); + if (searchText.trim()) { 
+ onSearch(searchText); + } + }, 300); // 300ms debounce + + return () => { + if (debounceTimerRef.current) { + clearTimeout(debounceTimerRef.current); + } + }; + }, [searchText, onSearch]); + + // Get selected option label + const selectedOption = options.find(option => option.value === value); + const displayText = selectedOption ? selectedOption.label : placeholder; + + return ( +
+ {label &&
{label}
} + +
setIsOpen(!isOpen)} + className={`flex justify-between items-center p-2 bg-[#263544] border ${error ? 'border-red-600' : 'border-gray-700'} text-white rounded-md cursor-pointer`} + > +
{displayText}
+ +
+ + {isOpen && ( +
+
+ + setSearchText(e.target.value)} + placeholder={searchPlaceholder} + className="bg-transparent text-white w-full focus:outline-none" + /> + {searchText && ( + + )} +
+ + {error && ( +
+ + {error} +
+ )} + + {loading ? ( +
+
+ Searching... +
+ ) : options.length === 0 ? ( +
No options available
+ ) : ( +
    + {options.map(option => ( +
  • { + onChange(option.value); + setIsOpen(false); + }} + > + {option.label} +
  • + ))} +
+ )} +
+ )} + + {error && !isOpen && ( +
+ + {error} +
+ )} +
+ ); +}; + +export default SearchableDropdown; diff --git a/poc-frontend/src/components/Sidebar.tsx b/poc-frontend/src/components/Sidebar.tsx new file mode 100644 index 0000000..5d7b332 --- /dev/null +++ b/poc-frontend/src/components/Sidebar.tsx @@ -0,0 +1,65 @@ +import React from 'react'; +import { ChevronsLeft, ChevronsRight, BarChart2, GitBranch, ClipboardCheck, Settings, LogOut } from 'lucide-react'; +import LogoutButton from './LogoutButton'; + +type SidebarProps = { + sidebarCollapsed: boolean; + activeTab: 'metrics' | 'repositories' | 'tests' | 'settings'; + toggleSidebar: () => void; + handleTabChange: (tab: 'metrics' | 'repositories' | 'tests' | 'settings') => void; +}; + +const Sidebar: React.FC = ({ sidebarCollapsed, activeTab, toggleSidebar, handleTabChange }) => { + return ( +
+
+ {!sidebarCollapsed &&
Keploy
} + +
+
    +
  • handleTabChange('metrics')} + > + + {!sidebarCollapsed && Metrics} +
  • +
  • handleTabChange('repositories')} + > + + {!sidebarCollapsed && Repositories} +
  • +
  • handleTabChange('settings')} + > + + {!sidebarCollapsed && Settings} +
  • +
+ +
+ + + {!sidebarCollapsed && Sign Out} + +
+
+ ); +}; + +export default Sidebar; diff --git a/poc-frontend/src/components/magicui/shine-border.tsx b/poc-frontend/src/components/magicui/shine-border.tsx new file mode 100644 index 0000000..5c3e828 --- /dev/null +++ b/poc-frontend/src/components/magicui/shine-border.tsx @@ -0,0 +1,61 @@ +"use client"; + +import * as React from "react"; + +import { cn } from "@/lib/utils"; + +interface ShineBorderProps extends React.HTMLAttributes { + /** + * Width of the border in pixels + * @default 1 + */ + borderWidth?: number; + /** + * Duration of the animation in seconds + * @default 14 + */ + duration?: number; + /** + * Color of the border, can be a single color or an array of colors + * @default "#000000" + */ + shineColor?: string | string[]; +} + +/** + * Shine Border + * + * An animated background border effect component with configurable properties. + */ +export function ShineBorder({ + borderWidth = 1, + duration = 14, + shineColor = "#f97316", + className, + style, + ...props +}: ShineBorderProps) { + return ( +
+ ); +} diff --git a/poc-frontend/src/components/ui/wavy-background.tsx b/poc-frontend/src/components/ui/wavy-background.tsx new file mode 100644 index 0000000..3f0f1a3 --- /dev/null +++ b/poc-frontend/src/components/ui/wavy-background.tsx @@ -0,0 +1,132 @@ +"use client"; +import { cn } from "@/lib/utils"; +import React, { useEffect, useRef, useState } from "react"; +import { createNoise3D } from "simplex-noise"; + +export const WavyBackground = ({ + children, + className, + containerClassName, + colors, + waveWidth, + backgroundFill, + blur = 10, + speed = "fast", + waveOpacity = 0.5, + ...props +}: { + children?: any; + className?: string; + containerClassName?: string; + colors?: string[]; + waveWidth?: number; + backgroundFill?: string; + blur?: number; + speed?: "slow" | "fast"; + waveOpacity?: number; + [key: string]: any; +}) => { + const noise = createNoise3D(); + let w: number, + h: number, + nt: number, + i: number, + x: number, + ctx: any, + canvas: any; + const canvasRef = useRef(null); + const getSpeed = () => { + switch (speed) { + case "slow": + return 0.001; + case "fast": + return 0.002; + default: + return 0.001; + } + }; + + const init = () => { + canvas = canvasRef.current; + ctx = canvas.getContext("2d"); + w = ctx.canvas.width = window.innerWidth; + h = ctx.canvas.height = window.innerHeight; + ctx.filter = `blur(${blur}px)`; + nt = 0; + window.onresize = function () { + w = ctx.canvas.width = window.innerWidth; + h = ctx.canvas.height = window.innerHeight; + ctx.filter = `blur(${blur}px)`; + }; + render(); + }; + + const waveColors = colors ?? 
[ + "#f97316", + "#fb923c", + "#fbbf24", + "#f59e0b", + "#fcd34d", + ]; + const drawWave = (n: number) => { + nt += getSpeed(); + for (i = 0; i < n; i++) { + ctx.beginPath(); + ctx.lineWidth = waveWidth || 50; + ctx.strokeStyle = waveColors[i % waveColors.length]; + for (x = 0; x < w; x += 5) { + var y = noise(x / 800, 0.3 * i, nt) * 100; + ctx.lineTo(x, y + h * 0.5); // adjust for height, currently at 50% of the container + } + ctx.stroke(); + ctx.closePath(); + } + }; + + let animationId: number; + const render = () => { + ctx.fillStyle = backgroundFill || "black"; + ctx.globalAlpha = waveOpacity || 0.5; + ctx.fillRect(0, 0, w, h); + drawWave(5); + animationId = requestAnimationFrame(render); + }; + + useEffect(() => { + init(); + return () => { + cancelAnimationFrame(animationId); + }; + }, []); + + const [isSafari, setIsSafari] = useState(false); + useEffect(() => { + // I'm sorry but i have got to support it on safari. + setIsSafari( + typeof window !== "undefined" && + navigator.userAgent.includes("Safari") && + !navigator.userAgent.includes("Chrome") + ); + }, []); + + return ( +
+ +
+ {children} +
+
+ ); +}; diff --git a/poc-frontend/src/components/withAuth.tsx b/poc-frontend/src/components/withAuth.tsx new file mode 100644 index 0000000..6dfa25b --- /dev/null +++ b/poc-frontend/src/components/withAuth.tsx @@ -0,0 +1,29 @@ +'use client'; + +import { useEffect } from 'react'; +import { useRouter } from 'next/navigation'; +import { isAuthenticated } from '@/services/auth'; + +export function withAuth

(Component: React.ComponentType

) { + return function ProtectedRoute(props: P) { + const router = useRouter(); + + useEffect(() => { + // Check if user is authenticated + if (!isAuthenticated()) { + // If not authenticated, redirect to login page + router.push('/'); + } + }, [router]); + + // If user is authenticated, render the protected component + if (isAuthenticated()) { + return ; + } + + // Return null while checking authentication or redirecting + return null; + }; +} + +export default withAuth; diff --git a/poc-frontend/src/constants/routes.ts b/poc-frontend/src/constants/routes.ts new file mode 100644 index 0000000..793926e --- /dev/null +++ b/poc-frontend/src/constants/routes.ts @@ -0,0 +1,7 @@ + +export const API_BASE_URL = process.env.NEXT_PUBLIC_API_BASE_URL || "http://localhost:8080"; +export const WS_URL = "wss://poc-backend2.azurewebsites.net"; +export const RECONNECT_DELAY = 5000; + +export const GITHUB_CLIENT_ID = process.env.NEXT_PUBLIC_GITHUB_CLIENT_ID +export const REDIRECT_URI = typeof window !== 'undefined' ? 
`${window.location.origin}` : ""; diff --git a/poc-frontend/src/lib/utils.ts b/poc-frontend/src/lib/utils.ts new file mode 100644 index 0000000..bd0c391 --- /dev/null +++ b/poc-frontend/src/lib/utils.ts @@ -0,0 +1,6 @@ +import { clsx, type ClassValue } from "clsx" +import { twMerge } from "tailwind-merge" + +export function cn(...inputs: ClassValue[]) { + return twMerge(clsx(inputs)) +} diff --git a/poc-frontend/src/services/api.ts b/poc-frontend/src/services/api.ts new file mode 100644 index 0000000..6fe0620 --- /dev/null +++ b/poc-frontend/src/services/api.ts @@ -0,0 +1,257 @@ +import axios from 'axios'; +import { getAuthToken } from '../utils/cookies'; +import { API_BASE_URL } from '@/constants/routes'; + +const debounce = any>( + func: F, + waitFor: number +): (...args: Parameters) => void => { + let timeout: ReturnType | null = null; + + return (...args: Parameters): void => { + if (timeout !== null) { + clearTimeout(timeout); + } + timeout = setTimeout(() => func(...args), waitFor); + }; +}; + +const requestCache: Record}> = {}; +const CACHE_TTL = 10000; + +const api = axios.create({ + baseURL: API_BASE_URL, + timeout: 10000, + headers: { + 'Content-Type': 'application/json', + }, +}); + +let requestCounter = 0; +const generateRequestId = () => { + requestCounter += 1; + return `req-${Date.now()}-${requestCounter}`; +}; + +api.interceptors.request.use( + (config) => { + const token = getAuthToken(); + if (token) { + config.headers['Authorization'] = `Bearer ${token}`; + } + try { + if (!config.headers['X-Request-ID'] && typeof localStorage !== 'undefined') { + const corsIssue = localStorage.getItem('x-request-id-cors-issue'); + if (corsIssue !== 'true') { + config.headers['X-Request-ID'] = generateRequestId(); + } + } + } catch (e) { + console.error('Failed to set X-Request-ID header:', e); + } + return config; + }, + (error) => { + return Promise.reject(error); + } +); + +api.interceptors.response.use( + (response) => response, + (error) => { + if 
(error.response && error.response.status === 401 && typeof window !== 'undefined') { + window.location.href = '/'; + } + if (error.message && error.message.includes('cors') && + error.message.toLowerCase().includes('x-request-id')) { + try { + if (typeof localStorage !== 'undefined') { + localStorage.setItem('x-request-id-cors-issue', 'true'); + console.warn('Detected CORS issue with X-Request-ID header. Will stop sending this header.'); + } + } catch (e) { + } + } + return Promise.reject(error); + } +); + +export const getUserProfile = async () => { + return api.get('/api/profile'); +}; + +export const getUserRepositories = async (skip = 0, limit = 10, search = '', refresh = false) => { + try { + const params = new URLSearchParams(); + params.append('skip', skip.toString()); + params.append('limit', limit.toString()); + if (search) { + params.append('search', search); + } + if (refresh) { + params.append('refresh', 'true'); + } + const cacheKey = `repos-${skip}-${limit}-${search}-${refresh}`; + const now = Date.now(); + if (!refresh && requestCache[cacheKey] && (now - requestCache[cacheKey].timestamp < CACHE_TTL)) { + return await requestCache[cacheKey].promise; + } + const promise = api.get(`/api/repositories?${params.toString()}`, { + timeout: refresh ? 60000 : 30000, + headers: { + 'X-Request-ID': generateRequestId() + } + }); + if (!refresh) { + requestCache[cacheKey] = { + timestamp: now, + promise + }; + } + return await promise; + } catch (error) { + console.error('Error fetching repositories:', error); + if (axios.isAxiosError(error) && error.code === 'ECONNABORTED') { + throw new Error('Request timed out. Please try again later.'); + } else if (axios.isAxiosError(error) && error.response?.status === 403) { + throw new Error('GitHub API access denied. 
Please check your authentication or try again later.'); + } + throw error; + } +}; + +export const refreshRepositories = async (skip = 0, limit = 10, search = '') => { + console.log(`Refreshing repositories - skip: ${skip}, limit: ${limit}, search: "${search}"`); + try { + const endpoint = limit >= 100 ? '/api/repositories/force-refresh' : '/api/repositories/refresh'; + const params = new URLSearchParams(); + params.append('skip', skip.toString()); + params.append('limit', limit.toString()); + if (search) { + params.append('search', search); + } + const response = await api.get(`${endpoint}?${params.toString()}`, { + timeout: 120000, + headers: { + 'X-Request-ID': generateRequestId() + } + }); + console.log(`Refresh request successful - received response:`, response.data); + if (endpoint === '/api/repositories/force-refresh') { + await new Promise(resolve => setTimeout(resolve, 2000)); + const actualResponse = await getUserRepositories(skip, limit, search, false); + return actualResponse; + } + return response; + } catch (error) { + console.error('Repository refresh failed:', error); + if (axios.isAxiosError(error)) { + const statusCode = error.response?.status; + const errorDetails = error.response?.data; + console.error(`Refresh error details - Status: ${statusCode}, Response:`, errorDetails); + if (statusCode === 403) { + console.error('Possible GitHub API rate limit exceeded'); + } else if (statusCode === 401) { + console.error('GitHub authentication issue - token may be invalid or expired'); + } + console.error('Request details:', { + url: error.config?.url, + method: error.config?.method, + headers: error.config?.headers, + params: error.config?.params + }); + } + throw error; + } +}; + +export const getGitHubContributions = async (username: string, year = 'last') => { + if (!username) throw new Error('GitHub username is required'); + return await api.get(`api/github-contributions?year=${year}&username=${username}`); +}; + +export const runCoverageScan = 
async ( + repoUrl: string, + branch?: string, + options?: { + async?: boolean; + cloneTimeout?: number + } +) => { + return api.post('/coverage', { + repo_url: repoUrl, + branch, + async: options?.async || false, + clone_timeout: options?.cloneTimeout || 300 + }); +}; + +export const clearJobStatusTracking = (jobId: string) => { + try { + if (typeof localStorage !== 'undefined') { + localStorage.removeItem(`job_${jobId}_start_time`); + localStorage.removeItem(`job_${jobId}_polling`); + } + } catch (e) { + console.error("Failed to clear job status tracking:", e); + } +}; + +export const getCoverageJobStatus = async (jobId: string) => { + try { + if (typeof localStorage !== 'undefined') { + const jobStatus = localStorage.getItem(`job_${jobId}_status`); + if (jobStatus === 'completed' || jobStatus === 'failed') { + console.log(`Job ${jobId} already marked as ${jobStatus}, returning cached result`); + return { + data: { + status: jobStatus, + job_id: jobId, + result_id: localStorage.getItem(`job_${jobId}_result_id`) || undefined + } + }; + } + localStorage.setItem(`job_${jobId}_polling`, 'true'); + } + const response = await api.get(`/coverage/status/${jobId}`); + if (typeof localStorage !== 'undefined' && + (response.data.status === 'completed' || response.data.status === 'failed')) { + localStorage.setItem(`job_${jobId}_status`, response.data.status); + if (response.data.result_id) { + localStorage.setItem(`job_${jobId}_result_id`, response.data.result_id); + } + } + return response; + } catch (error) { + console.error(`Error fetching job status for ${jobId}:`, error); + throw error; + } +}; + +export const getCoverageHistory = async (repoUrl: string) => { + return api.get('/coverage/history', { params: { repo_url: repoUrl } }); +}; + +export const getCoverageById = async (id: string) => { + return api.get(`/coverage/${id}`); +}; + +export const getCoverageTrends = async (repoUrl: string, days = 30) => { + return api.get('/coverage/trends', { params: { repo_url: 
repoUrl, days } }); +}; + +export const scanMultipleBranches = async (repoUrl: string, branches: string[]) => { + return api.post('/coverage/branches', { repo_url: repoUrl, branches }); +}; + +export const getBranchCoverage = async (repoUrl: string) => { + return api.get('/coverage/branches', { params: { repo_url: repoUrl } }); +}; + +export const compareBranchCoverage = async (repoUrl: string, branch1: string, branch2: string) => { + return api.get('/coverage/compare', { + params: { repo_url: repoUrl, branch1, branch2 } + }); +}; + +export default api; diff --git a/poc-frontend/src/services/auth.ts b/poc-frontend/src/services/auth.ts new file mode 100644 index 0000000..cf45399 --- /dev/null +++ b/poc-frontend/src/services/auth.ts @@ -0,0 +1,59 @@ +import api from './api'; +import { setAuthToken, setUserData, clearAuthCookies, getAuthToken } from '../utils/cookies'; + +export interface AuthResponse { + token: string; + user: { + id: string; + github_id: number; + username: string; + email: string; + name: string; + avatar_url: string; + created_at: string; + updated_at: string; + }; +} + +export interface GitHubAuthRequest { + code: string; +} + +export const githubSignUp = async (code: string): Promise => { + const response = await api.post('/auth/github/signup', { code }); + + if (response.data) { + setAuthToken(response.data.token); + setUserData(response.data.user); + } + + return response.data; +}; + +export const githubSignIn = async (code: string): Promise => { + const response = await api.post('/auth/github/signin', { code }); + + if (response.data) { + setAuthToken(response.data.token); + setUserData(response.data.user); + } + + return response.data; +}; + +export const getUserProfile = async () => { + const response = await api.get('/api/profile'); + return response.data; +}; + +export const signOut = () => { + clearAuthCookies(); + if (typeof window !== 'undefined') { + window.location.href = '/'; + } +}; + +export const isAuthenticated = (): boolean => 
{ + const token = getAuthToken(); + return !!token; +}; diff --git a/poc-frontend/src/types/coverage.ts b/poc-frontend/src/types/coverage.ts new file mode 100644 index 0000000..69fc491 --- /dev/null +++ b/poc-frontend/src/types/coverage.ts @@ -0,0 +1,75 @@ +// File: src/types/coverage.ts +export interface FileCoverage { + file: string; + coverage: number; +} + +export interface CoverageResponse { + total_coverage: number; + files: FileCoverage[]; + id?: string; + repository?: string; + branch?: string; + timestamp?: string; + commit_hash?: string; +} + +export interface CoverageHistory { + id: string; + repository: string; + branch: string; + total_coverage: number; + files: FileCoverage[]; + timestamp: string; + commit_hash?: string; +} + +export interface CoverageTrend { + date: string; + coverage: number; + commit_hash?: string; +} + +export interface BranchCoverage { + id: string; + branch: string; + total_coverage: number; + timestamp: string; + commit_hash?: string; +} + +export interface FileDiff { + file: string; + branch1: number; + branch2: number; + diff: number; + diff_label: 'better' | 'worse' | 'same' | 'new' | 'removed'; +} + +export interface BranchCompareResult { + repository: string; + branch1: string; + branch2: string; + coverage1: number; + coverage2: number; + coverage_diff: number; + diff_label: 'better' | 'worse' | 'same'; + file_diffs: FileDiff[]; + branch1_date: string; + branch2_date: string; + branch1_commit?: string; + branch2_commit?: string; +} + +export interface MultiBranchScanResult { + repo_url: string; + total_scanned: number; + successful: number; + failed: number; + branches: Array<{ + branch: string; + status: string; + coverage?: number; + error?: string; + }>; +} diff --git a/poc-frontend/src/types/dashboardTypes.ts b/poc-frontend/src/types/dashboardTypes.ts new file mode 100644 index 0000000..337fc2c --- /dev/null +++ b/poc-frontend/src/types/dashboardTypes.ts @@ -0,0 +1,133 @@ +export interface Repository { + _id?: string; 
+ name: string; + owner: string; + status: 'active' | 'inactive' | 'error'; + url: string; + description?: string; +} + +export interface MetricsData { + passRate: number; + totalRepositories: number; + totalTests: number; + testsLast7Days: number; + averageDuration: number; + testsByStatus: { + passed: number; + failed: number; + skipped: number; + error: number; + }; +} + +export interface TimeSeriesDataPoint { + date: string; + total: number; + passed: number; + failed: number; + skipped: number; + error?: number; + avgDuration?: number; +} + +export interface TestResult { + _id: string; + repositoryId: string; + name: string; + status: 'passed' | 'failed' | 'skipped' | 'error'; + duration: number; + executedAt: string; + endpoint?: string; + method?: string; + details?: { + error?: string; + requestBody?: any; + responseBody?: any; + statusCode?: number; + }; +} + +export interface RepositoryTestsResponse { + success: boolean; + count: number; + statusCounts: { + passed: number; + failed: number; + skipped: number; + error: number; + }; + data: TestResult[]; +} + +export interface LayoutConfig { + i: string; + x: number; + y: number; + w: number; + h: number; + minW?: number; + minH?: number; + maxW?: number; + maxH?: number; +} + +export interface DashboardWidgetType { + widgetKey: string; + title: string; + type: 'chart' | 'card' | 'table' | 'custom'; + dataSource: string; + position: { + x: number; + y: number; + w: number; + h: number; + }; + settings?: { + [key: string]: any; + chartType?: 'line' | 'bar' | 'pie' | 'doughnut' | 'gauge'; + showLegend?: boolean; + fillArea?: boolean; + showPassed?: boolean; + showFailed?: boolean; + showSkipped?: boolean; + showError?: boolean; + metric?: string; + showTrend?: boolean; + pageSize?: number; + showPagination?: boolean; + sortable?: boolean; + }; +} + +export interface DashboardComponent { + id: string; + name: string; + type: "chart" | "card" | "table" | "custom"; + subtype?: string; + description: string; + 
defaultDataSource: string; + defaultSize: { + w: number; + h: number; + }; + defaultSettings?: { + [key: string]: any; + }; +} + +export interface DashboardConfig { + id?: string; + userId: string; + name: string; + description?: string; + layout?: string; + widgets: DashboardWidgetType[]; + createdAt?: string; + updatedAt?: string; +} + +export interface WebSocketMessage { + type: string; + data?: any; +} diff --git a/poc-frontend/src/types/job.ts b/poc-frontend/src/types/job.ts new file mode 100644 index 0000000..c335312 --- /dev/null +++ b/poc-frontend/src/types/job.ts @@ -0,0 +1,12 @@ +export interface JobStatus { + id: string; + job_type: string; + status: 'pending' | 'in_progress' | 'completed' | 'failed'; + start_time: string; + end_time?: string; + error?: string; + repository: string; + branch?: string; + result_id?: string; + progress: number; +} diff --git a/poc-frontend/src/types/repository.ts b/poc-frontend/src/types/repository.ts new file mode 100644 index 0000000..d99b446 --- /dev/null +++ b/poc-frontend/src/types/repository.ts @@ -0,0 +1,17 @@ +export interface Repository { + id: string; + name: string; + fullName?: string; + full_name?: string; + description: string; + url: string; + html_url: string; + owner: string; + githubId?: number; + github_id?: number; + private: boolean; + status: string; + created_at: string; + updated_at: string; + languages?: Record; +} diff --git a/poc-frontend/src/utils/cookies.ts b/poc-frontend/src/utils/cookies.ts new file mode 100644 index 0000000..f857fd3 --- /dev/null +++ b/poc-frontend/src/utils/cookies.ts @@ -0,0 +1,73 @@ +import { Cookies } from 'react-cookie'; +import { CookieSetOptions } from 'universal-cookie'; + +const cookies = new Cookies(); + +// Cookie default configuration +const defaultOptions: CookieSetOptions = { + path: '/', + secure: process.env.NODE_ENV === 'production', + sameSite: 'strict', +}; + +export const setCookie = ( + key: string, + value: string, + options?: CookieSetOptions +): void 
=> { + cookies.set(key, value, { ...defaultOptions, ...options }); +}; + +export const getCookie = (key: string): string | undefined => { + return cookies.get(key); +}; + + +export const removeCookie = ( + key: string, + options?: CookieSetOptions +): void => { + cookies.remove(key, { ...defaultOptions, ...options }); +}; + +export const AUTH_TOKEN_KEY = 'auth_token'; +export const USER_DATA_KEY = 'user_data'; + +export const setAuthToken = (token: string): void => { + setCookie(AUTH_TOKEN_KEY, token); +}; + +export const getAuthToken = (): string | undefined => { + return getCookie(AUTH_TOKEN_KEY); +}; + +export const removeAuthToken = (): void => { + removeCookie(AUTH_TOKEN_KEY); +}; + +export const setUserData = (userData: any): void => { + setCookie(USER_DATA_KEY, JSON.stringify(userData)); +}; + +export const getUserData = (): any | undefined => { + const data = getCookie(USER_DATA_KEY); + if (data) { + try { + return JSON.parse(data); + } catch (error) { + console.error('Error parsing user data from cookie:', error); + return undefined; + } + } + return undefined; +}; + +export const removeUserData = (): void => { + removeCookie(USER_DATA_KEY); +}; + +// Clear all auth related cookies +export const clearAuthCookies = (): void => { + removeAuthToken(); + removeUserData(); +}; diff --git a/poc-frontend/tailwind.config.js b/poc-frontend/tailwind.config.js new file mode 100644 index 0000000..e2bbbd6 --- /dev/null +++ b/poc-frontend/tailwind.config.js @@ -0,0 +1,14 @@ +/** @type {import('tailwindcss').Config} */ +module.exports = { + content: [ + "./src/**/*.{js,ts,jsx,tsx,mdx}", + ], + theme: { + extend: { + gridTemplateColumns: { + '53': 'repeat(53, minmax(0, 1fr))', + }, + }, + }, + plugins: [], +} diff --git a/poc-frontend/tsconfig.json b/poc-frontend/tsconfig.json new file mode 100644 index 0000000..c133409 --- /dev/null +++ b/poc-frontend/tsconfig.json @@ -0,0 +1,27 @@ +{ + "compilerOptions": { + "target": "ES2017", + "lib": ["dom", "dom.iterable", 
"esnext"], + "allowJs": true, + "skipLibCheck": true, + "strict": true, + "noEmit": true, + "esModuleInterop": true, + "module": "esnext", + "moduleResolution": "bundler", + "resolveJsonModule": true, + "isolatedModules": true, + "jsx": "preserve", + "incremental": true, + "plugins": [ + { + "name": "next" + } + ], + "paths": { + "@/*": ["./src/*"] + } + }, + "include": ["next-env.d.ts", "**/*.ts", "**/*.tsx", ".next/types/**/*.ts"], + "exclude": ["node_modules"] +}