diff --git a/.claude/settings.local.json b/.claude/settings.local.json index 20c5f4a..f16d4f4 100644 --- a/.claude/settings.local.json +++ b/.claude/settings.local.json @@ -50,7 +50,15 @@ "Bash(powershell -Command \"Get-Content ''e:\\\\MyTaskly\\\\MyTaskly-app\\\\src\\\\locales\\\\en.json'' | Select-Object -Last 20\")", "Bash(powershell -Command \"Get-Content ''e:\\\\MyTaskly\\\\MyTaskly-app\\\\src\\\\locales\\\\it.json'' | Select-Object -Last 20\")", "Bash(npm ls:*)", - "Bash(curl:*)" + "Bash(curl:*)", + "Bash(export NVM_DIR=\"$HOME/.nvm\")", + "Bash([ -s \"$NVM_DIR/nvm.sh\" ])", + "Bash(. \"$NVM_DIR/nvm.sh\")", + "Bash(npm test:*)", + "Bash(xargs:*)", + "WebFetch(domain:www.npmjs.com)", + "WebFetch(domain:docs.expo.dev)", + "WebFetch(domain:github.com)" ], "deny": [], "defaultMode": "acceptEdits" diff --git a/.gitignore b/.gitignore index e170043..0841b67 100644 --- a/.gitignore +++ b/.gitignore @@ -51,3 +51,14 @@ docs/ # Firebase secrets *firebase-adminsdk*.json + +# Auto Claude data directory +.auto-claude/ + +# Auto Claude generated files +.auto-claude-security.json +.auto-claude-status +.claude_settings.json +.worktrees/ +.security-key +logs/security/ diff --git a/app.json b/app.json index 5063c7f..9872283 100644 --- a/app.json +++ b/app.json @@ -74,7 +74,8 @@ } ], "expo-dev-client", - "expo-router" + "expo-router", + "expo-audio" ], "extra": { "eas": { diff --git a/package-lock.json b/package-lock.json index c9338df..f0617ac 100644 --- a/package-lock.json +++ b/package-lock.json @@ -11,6 +11,7 @@ "@expo/vector-icons": "^14.0.2", "@flyerhq/react-native-chat-ui": "^1.4.3", "@openspacelabs/react-native-zoomable-view": "^2.3.1", + "@picovoice/react-native-voice-processor": "^1.2.3", "@react-native-async-storage/async-storage": "^2.2.0", "@react-native-community/datetimepicker": "8.4.1", "@react-native-google-signin/google-signin": "^15.0.0", @@ -26,6 +27,7 @@ "dotenv": "^17.2.1", "eventsource": "^4.0.0", "expo": "~53.0.23", + "expo-audio": "~0.4.9", "expo-av": 
"^15.1.7", "expo-blur": "~14.1.5", "expo-constants": "~17.1.7", @@ -75,7 +77,7 @@ }, "devDependencies": { "@babel/core": "^7.25.2", - "@react-native-community/cli": "^18.0.0", + "@react-native-community/cli": "^14.0.1", "@types/jest": "^29.5.12", "@types/react": "~19.0.10", "@types/react-test-renderer": "^18.3.0", @@ -3599,6 +3601,19 @@ "react-native": ">=0.54.0" } }, + "node_modules/@picovoice/react-native-voice-processor": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/@picovoice/react-native-voice-processor/-/react-native-voice-processor-1.2.3.tgz", + "integrity": "sha512-GFnuKXWIOrDTPumcFkwfGaGUt2X1Vq31cU0sM4CZ9o/SomZQxJml12nr8d4uxjG03Z/eouWGN/0AcxZPdqihlw==", + "license": "Apache-2.0", + "engines": { + "node": ">= 16.0.0" + }, + "peerDependencies": { + "react": "*", + "react-native": "*" + } + }, "node_modules/@pkgjs/parseargs": { "version": "0.11.0", "resolved": "https://registry.npmjs.org/@pkgjs/parseargs/-/parseargs-0.11.0.tgz", @@ -3655,18 +3670,19 @@ } }, "node_modules/@react-native-community/cli": { - "version": "18.0.1", - "resolved": "https://registry.npmjs.org/@react-native-community/cli/-/cli-18.0.1.tgz", - "integrity": "sha512-nEEYwfyP00j9i4nr/HhwPabDscoBhhCjxDBpcKERi2oTYHcBr3FTu9PV1gbeJCa4vRCHr6b6VgOcVTdy99NddQ==", + "version": "14.0.1", + "resolved": "https://registry.npmjs.org/@react-native-community/cli/-/cli-14.0.1.tgz", + "integrity": "sha512-QxBbQmZhhDZKOGTIjPr0cDrFcVrxlzFG1BmOuhok3x4xUk09B3zyIl8xnaqQ53YIVhXQHS0BX0f0evuKBcbcqQ==", "devOptional": true, "license": "MIT", "dependencies": { - "@react-native-community/cli-clean": "18.0.1", - "@react-native-community/cli-config": "18.0.1", - "@react-native-community/cli-doctor": "18.0.1", - "@react-native-community/cli-server-api": "18.0.1", - "@react-native-community/cli-tools": "18.0.1", - "@react-native-community/cli-types": "18.0.1", + "@react-native-community/cli-clean": "14.0.1", + "@react-native-community/cli-config": "14.0.1", + "@react-native-community/cli-debugger-ui": 
"14.0.1", + "@react-native-community/cli-doctor": "14.0.1", + "@react-native-community/cli-server-api": "14.0.1", + "@react-native-community/cli-tools": "14.0.1", + "@react-native-community/cli-types": "14.0.1", "chalk": "^4.1.2", "commander": "^9.4.1", "deepmerge": "^4.3.0", @@ -3685,26 +3701,26 @@ } }, "node_modules/@react-native-community/cli-clean": { - "version": "18.0.1", - "resolved": "https://registry.npmjs.org/@react-native-community/cli-clean/-/cli-clean-18.0.1.tgz", - "integrity": "sha512-brXqk//lmA2Vs5lGq9YLhk7X4IYBSrDRte6t1AktouJpCrH4Tp1sl45yJDS2CHOi/OY1oOfI3kA61tXNX5a/5A==", + "version": "14.0.1", + "resolved": "https://registry.npmjs.org/@react-native-community/cli-clean/-/cli-clean-14.0.1.tgz", + "integrity": "sha512-vf6dEwq0WmsQu2BViSZI8ascTdk7J4o82FRqpHiYSZkQau6eRmYFrgLoC5hg985Hp/HD+3wRSDW+lB/2+fz3yA==", "devOptional": true, "license": "MIT", "dependencies": { - "@react-native-community/cli-tools": "18.0.1", + "@react-native-community/cli-tools": "14.0.1", "chalk": "^4.1.2", "execa": "^5.0.0", "fast-glob": "^3.3.2" } }, "node_modules/@react-native-community/cli-config": { - "version": "18.0.1", - "resolved": "https://registry.npmjs.org/@react-native-community/cli-config/-/cli-config-18.0.1.tgz", - "integrity": "sha512-O4DDJVMx+DYfwEgF/6lB4hoI9sVjrYW6AlLqeJY/D2XH2e4yqK/Pr3SAi4sOMgvjvYZKzLHqIQVxx54v+LyMQA==", + "version": "14.0.1", + "resolved": "https://registry.npmjs.org/@react-native-community/cli-config/-/cli-config-14.0.1.tgz", + "integrity": "sha512-yiyBdrsqZmaTtb5XFcfCwFavPagw7UxQVKnRBNVdwQNQHsRqhhPWZ0c7W+sESTMC/L9T0zvr7k4dxZ6TJEqqKQ==", "devOptional": true, "license": "MIT", "dependencies": { - "@react-native-community/cli-tools": "18.0.1", + "@react-native-community/cli-tools": "14.0.1", "chalk": "^4.1.2", "cosmiconfig": "^9.0.0", "deepmerge": "^4.3.0", @@ -3712,44 +3728,28 @@ "joi": "^17.2.1" } }, - "node_modules/@react-native-community/cli-config-android": { - "version": "18.0.1", - "resolved": 
"https://registry.npmjs.org/@react-native-community/cli-config-android/-/cli-config-android-18.0.1.tgz", - "integrity": "sha512-1wzmGLfS7qgzm0ZfwX/f6Lat/af8/UYdjwtb3ap6RfKNclvIoap0wN6uBeiANmLfk0/BhoG8K1vKtIPwlU/V1A==", + "node_modules/@react-native-community/cli-debugger-ui": { + "version": "14.0.1", + "resolved": "https://registry.npmjs.org/@react-native-community/cli-debugger-ui/-/cli-debugger-ui-14.0.1.tgz", + "integrity": "sha512-8eQ1U21Uwwm71jmIi9iaiou2VxqJQTm9k7Ch5Nj1ZEErO+nKFc9qExPTGtzsE80mHncLq8yNkQfV5waJvvhclg==", "devOptional": true, "license": "MIT", "dependencies": { - "@react-native-community/cli-tools": "18.0.1", - "chalk": "^4.1.2", - "fast-glob": "^3.3.2", - "fast-xml-parser": "^4.4.1" - } - }, - "node_modules/@react-native-community/cli-config-apple": { - "version": "18.0.1", - "resolved": "https://registry.npmjs.org/@react-native-community/cli-config-apple/-/cli-config-apple-18.0.1.tgz", - "integrity": "sha512-ybr1ZrOSd/Z+oCJ1qVSKVQauvneObTu3VjvYPhhrme7tUUSaYmd3iikaWonbKk5rVp+2WqOFR6Cy7XqVfwwG8A==", - "devOptional": true, - "license": "MIT", - "dependencies": { - "@react-native-community/cli-tools": "18.0.1", - "chalk": "^4.1.2", - "execa": "^5.0.0", - "fast-glob": "^3.3.2" + "serve-static": "^1.13.1" } }, "node_modules/@react-native-community/cli-doctor": { - "version": "18.0.1", - "resolved": "https://registry.npmjs.org/@react-native-community/cli-doctor/-/cli-doctor-18.0.1.tgz", - "integrity": "sha512-B1UWpiVeJ45DX0ip1Et62knAHLzeA1B3XcTJu16PscednnNxV6GBH52kRUoWMsB8HQ8f9IWdFTol8LAp5Y0wwg==", + "version": "14.0.1", + "resolved": "https://registry.npmjs.org/@react-native-community/cli-doctor/-/cli-doctor-14.0.1.tgz", + "integrity": "sha512-wXLa4cPgNuFJE6K2dv/0JRq62iJUiSVLN0Wa5TxugX+ojXBle/oe1W3nibOJFYdyRIEUGk3u7A6gdoznNtZEJg==", "devOptional": true, "license": "MIT", "dependencies": { - "@react-native-community/cli-config": "18.0.1", - "@react-native-community/cli-platform-android": "18.0.1", - "@react-native-community/cli-platform-apple": 
"18.0.1", - "@react-native-community/cli-platform-ios": "18.0.1", - "@react-native-community/cli-tools": "18.0.1", + "@react-native-community/cli-config": "14.0.1", + "@react-native-community/cli-platform-android": "14.0.1", + "@react-native-community/cli-platform-apple": "14.0.1", + "@react-native-community/cli-platform-ios": "14.0.1", + "@react-native-community/cli-tools": "14.0.1", "chalk": "^4.1.2", "command-exists": "^1.2.8", "deepmerge": "^4.3.0", @@ -3758,10 +3758,21 @@ "node-stream-zip": "^1.9.1", "ora": "^5.4.1", "semver": "^7.5.2", + "strip-ansi": "^5.2.0", "wcwidth": "^1.0.1", "yaml": "^2.2.1" } }, + "node_modules/@react-native-community/cli-doctor/node_modules/ansi-regex": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-4.1.1.tgz", + "integrity": "sha512-ILlv4k/3f6vfQ4OoP2AGvirOktlQ98ZEL1k9FaQjxa3L1abBgbuTDAdPOpvbGncC0BTVQrl+OM8xZGK6tWXt7g==", + "devOptional": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, "node_modules/@react-native-community/cli-doctor/node_modules/semver": { "version": "7.7.3", "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.3.tgz", @@ -3775,80 +3786,94 @@ "node": ">=10" } }, + "node_modules/@react-native-community/cli-doctor/node_modules/strip-ansi": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-5.2.0.tgz", + "integrity": "sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA==", + "devOptional": true, + "license": "MIT", + "dependencies": { + "ansi-regex": "^4.1.0" + }, + "engines": { + "node": ">=6" + } + }, "node_modules/@react-native-community/cli-platform-android": { - "version": "18.0.1", - "resolved": "https://registry.npmjs.org/@react-native-community/cli-platform-android/-/cli-platform-android-18.0.1.tgz", - "integrity": "sha512-DCltVWDR7jfZZG5MXREVKG0fmIr1b0irEhmdkk/R87dG6HJ8tGXWXnAa4Kap8bx2v6lKFXDW5QxNecyyLCOkVw==", + "version": "14.0.1", + "resolved": 
"https://registry.npmjs.org/@react-native-community/cli-platform-android/-/cli-platform-android-14.0.1.tgz", + "integrity": "sha512-HNLIuD3ie8p1cLhsvfH19XPhjZjqZGX+zYlm4lpOKxPTSEzkaiAdOIMj5dVdN79zO1tmEawyyrwT4Jx5ddeUsQ==", "devOptional": true, "license": "MIT", "dependencies": { - "@react-native-community/cli-config-android": "18.0.1", - "@react-native-community/cli-tools": "18.0.1", + "@react-native-community/cli-tools": "14.0.1", "chalk": "^4.1.2", "execa": "^5.0.0", + "fast-glob": "^3.3.2", + "fast-xml-parser": "^4.2.4", "logkitty": "^0.7.1" } }, "node_modules/@react-native-community/cli-platform-apple": { - "version": "18.0.1", - "resolved": "https://registry.npmjs.org/@react-native-community/cli-platform-apple/-/cli-platform-apple-18.0.1.tgz", - "integrity": "sha512-7WxGXT/ui7VtyLVjx5rkYkkTMlbufI6p5BdRKjGp/zQDnDzs/0rle0JEHJxwgvs5VQnt+VOnHBMipkQAhydIqQ==", + "version": "14.0.1", + "resolved": "https://registry.npmjs.org/@react-native-community/cli-platform-apple/-/cli-platform-apple-14.0.1.tgz", + "integrity": "sha512-KY7zlDpzKfVzBuHWde7EXKgdkFFYl5Lp1o1ro8ie83sDzwSVTSyd5Pmb351SJABJuNFSuTzbyyMiuG+1BQ4hyg==", "devOptional": true, "license": "MIT", "dependencies": { - "@react-native-community/cli-config-apple": "18.0.1", - "@react-native-community/cli-tools": "18.0.1", + "@react-native-community/cli-tools": "14.0.1", "chalk": "^4.1.2", "execa": "^5.0.0", - "fast-xml-parser": "^4.4.1" + "fast-glob": "^3.3.2", + "fast-xml-parser": "^4.2.4", + "ora": "^5.4.1" } }, "node_modules/@react-native-community/cli-platform-ios": { - "version": "18.0.1", - "resolved": "https://registry.npmjs.org/@react-native-community/cli-platform-ios/-/cli-platform-ios-18.0.1.tgz", - "integrity": "sha512-GtO1FB+xaz+vcHIdvl94AkD5B8Y+H8XHb6QwnYX+A3WwteGsHrR8iD/bLLcRtNPtLaAWMa/RgWJpgs4KG+eU4w==", + "version": "14.0.1", + "resolved": "https://registry.npmjs.org/@react-native-community/cli-platform-ios/-/cli-platform-ios-14.0.1.tgz", + "integrity": 
"sha512-9DKbMHf53A4ae2sJ0rZFqi2m7Z1NoaWZYhkWufO8tEXCKckBat4ddio/Ei6g0bQk8IKdLzHogQFMuU3jbT3LYA==", "devOptional": true, "license": "MIT", "dependencies": { - "@react-native-community/cli-platform-apple": "18.0.1" + "@react-native-community/cli-platform-apple": "14.0.1" } }, "node_modules/@react-native-community/cli-server-api": { - "version": "18.0.1", - "resolved": "https://registry.npmjs.org/@react-native-community/cli-server-api/-/cli-server-api-18.0.1.tgz", - "integrity": "sha512-ZRy2IjEM4ljP05bZcnXho0sCxVGI/9SkWkLuzXl+cRu/4I8vLRleihn2GJCopg82QHLLrajUCHhpDKE8NJWcRw==", + "version": "14.0.1", + "resolved": "https://registry.npmjs.org/@react-native-community/cli-server-api/-/cli-server-api-14.0.1.tgz", + "integrity": "sha512-8oKMcDH72vuX2pWv+N07NcBWoMOykK7cUKshCjSqupWNr1wGxHnJC4uFvLLAwpoewYlOQXMnU9Jn/9IWA6Sm2w==", "devOptional": true, "license": "MIT", "dependencies": { - "@react-native-community/cli-tools": "18.0.1", - "body-parser": "^1.20.3", + "@react-native-community/cli-debugger-ui": "14.0.1", + "@react-native-community/cli-tools": "14.0.1", "compression": "^1.7.1", "connect": "^3.6.5", "errorhandler": "^1.5.1", "nocache": "^3.0.1", - "open": "^6.2.0", "pretty-format": "^26.6.2", "serve-static": "^1.13.1", "ws": "^6.2.3" } }, "node_modules/@react-native-community/cli-tools": { - "version": "18.0.1", - "resolved": "https://registry.npmjs.org/@react-native-community/cli-tools/-/cli-tools-18.0.1.tgz", - "integrity": "sha512-WxWFXwfYhHR2eYiB4lkHZVC/PmIkRWeVHBQKmn0h1mecr3GrHYO4BzW1jpD5Xt6XZ9jojQ9wE5xrCqXjiMSAIQ==", + "version": "14.0.1", + "resolved": "https://registry.npmjs.org/@react-native-community/cli-tools/-/cli-tools-14.0.1.tgz", + "integrity": "sha512-4tksgruPliZlQmO6kfX1gUBNvDChoOcZgGt9r4KvvyFelCR3VUWEoJkvhKlDlSoaEtzBO2L426CTnFK4ED/yBQ==", "devOptional": true, "license": "MIT", "dependencies": { - "@vscode/sudo-prompt": "^9.0.0", "appdirsjs": "^1.2.4", "chalk": "^4.1.2", "execa": "^5.0.0", "find-up": "^5.0.0", - "launch-editor": "^2.9.1", "mime": 
"^2.4.1", + "open": "^6.2.0", "ora": "^5.4.1", - "prompts": "^2.4.2", - "semver": "^7.5.2" + "semver": "^7.5.2", + "shell-quote": "^1.7.3", + "sudo-prompt": "^9.0.0" } }, "node_modules/@react-native-community/cli-tools/node_modules/semver": { @@ -3865,9 +3890,9 @@ } }, "node_modules/@react-native-community/cli-types": { - "version": "18.0.1", - "resolved": "https://registry.npmjs.org/@react-native-community/cli-types/-/cli-types-18.0.1.tgz", - "integrity": "sha512-pGxr/TSP9Xiw2+9TUn3OWLdcuI4+PJozPsCYZVTGWJ96X6Pv7YX/rNy4emIDkaWaFZ7IWgWXUA725KhEINSf3Q==", + "version": "14.0.1", + "resolved": "https://registry.npmjs.org/@react-native-community/cli-types/-/cli-types-14.0.1.tgz", + "integrity": "sha512-Yk6TLdkSQHaSG9NlcMsHtX7ZYuHaOrZ7K/3zBbpabWUm006blND5NKw8YQuk9guqK7i/PMSUKJUq1NbF7bGjDA==", "devOptional": true, "license": "MIT", "dependencies": { @@ -5393,13 +5418,6 @@ "@urql/core": "^5.0.0" } }, - "node_modules/@vscode/sudo-prompt": { - "version": "9.3.1", - "resolved": "https://registry.npmjs.org/@vscode/sudo-prompt/-/sudo-prompt-9.3.1.tgz", - "integrity": "sha512-9ORTwwS74VaTn38tNbQhsA5U44zkJfcb0BdTSyyG6frP4e8KMtHuTXYmwefe5dpL8XB1aGSIVTaLjD3BbWb5iA==", - "devOptional": true, - "license": "MIT" - }, "node_modules/@webassemblyjs/ast": { "version": "1.14.1", "resolved": "https://registry.npmjs.org/@webassemblyjs/ast/-/ast-1.14.1.tgz", @@ -6464,48 +6482,6 @@ "readable-stream": "^3.4.0" } }, - "node_modules/body-parser": { - "version": "1.20.3", - "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.20.3.tgz", - "integrity": "sha512-7rAxByjUMqQ3/bHJy7D6OGXvx/MMc4IqBn/X0fcM1QUcAItpZrBEYhWGem+tzXH90c+G01ypMcYJBO9Y30203g==", - "devOptional": true, - "license": "MIT", - "dependencies": { - "bytes": "3.1.2", - "content-type": "~1.0.5", - "debug": "2.6.9", - "depd": "2.0.0", - "destroy": "1.2.0", - "http-errors": "2.0.0", - "iconv-lite": "0.4.24", - "on-finished": "2.4.1", - "qs": "6.13.0", - "raw-body": "2.5.2", - "type-is": "~1.6.18", - "unpipe": "1.0.0" - }, 
- "engines": { - "node": ">= 0.8", - "npm": "1.2.8000 || >= 1.4.16" - } - }, - "node_modules/body-parser/node_modules/debug": { - "version": "2.6.9", - "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", - "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", - "devOptional": true, - "license": "MIT", - "dependencies": { - "ms": "2.0.0" - } - }, - "node_modules/body-parser/node_modules/ms": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", - "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==", - "devOptional": true, - "license": "MIT" - }, "node_modules/boolbase": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/boolbase/-/boolbase-1.0.0.tgz", @@ -7144,16 +7120,6 @@ "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==", "license": "MIT" }, - "node_modules/content-type": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/content-type/-/content-type-1.0.5.tgz", - "integrity": "sha512-nTjqfcBFEipKdXCv4YDQWCfmcLZKm81ldF0pAopTvyrFGVbcR6P/VAAd5G7N+0tTr8QqiU0tFadD6FK4NtJwOA==", - "devOptional": true, - "license": "MIT", - "engines": { - "node": ">= 0.6" - } - }, "node_modules/convert-source-map": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-2.0.0.tgz", @@ -7871,9 +7837,9 @@ } }, "node_modules/envinfo": { - "version": "7.20.0", - "resolved": "https://registry.npmjs.org/envinfo/-/envinfo-7.20.0.tgz", - "integrity": "sha512-+zUomDcLXsVkQ37vUqWBvQwLaLlj8eZPSi61llaEFAVBY5mhcXdaSw1pSJVl4yTYD5g/gEfpNl28YYk4IPvrrg==", + "version": "7.21.0", + "resolved": "https://registry.npmjs.org/envinfo/-/envinfo-7.21.0.tgz", + "integrity": "sha512-Lw7I8Zp5YKHFCXL7+Dz95g4CcbMEpgvqZNNq3AmlT5XAV6CgAAk6gyAMqn2zjw08K9BHfcNuKrMiCPLByGafow==", "devOptional": true, "license": "MIT", "bin": { @@ 
-7902,17 +7868,21 @@ } }, "node_modules/errorhandler": { - "version": "1.5.1", - "resolved": "https://registry.npmjs.org/errorhandler/-/errorhandler-1.5.1.tgz", - "integrity": "sha512-rcOwbfvP1WTViVoUjcfZicVzjhjTuhSMntHh6mW3IrEiyE6mJyXvsToJUJGlGlw/2xU9P5whlWNGlIDVeCiT4A==", + "version": "1.5.2", + "resolved": "https://registry.npmjs.org/errorhandler/-/errorhandler-1.5.2.tgz", + "integrity": "sha512-kNAL7hESndBCrWwS72QyV3IVOTrVmj9D062FV5BQswNL5zEdeRmz/WJFyh6Aj/plvvSOrzddkxW57HgkZcR9Fw==", "devOptional": true, "license": "MIT", "dependencies": { - "accepts": "~1.3.7", + "accepts": "~1.3.8", "escape-html": "~1.0.3" }, "engines": { "node": ">= 0.8" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/express" } }, "node_modules/es-abstract": { @@ -8734,6 +8704,17 @@ "react-native": "*" } }, + "node_modules/expo-audio": { + "version": "0.4.9", + "resolved": "https://registry.npmjs.org/expo-audio/-/expo-audio-0.4.9.tgz", + "integrity": "sha512-J4mMYEt2mqRqqwmSsXFylMGlrNWa+MbCzGl1IZBs+smvPAMJ3Ni8fNplzCQ0I9RnRzygKhRwJNpnAVL+n4MuyA==", + "license": "MIT", + "peerDependencies": { + "expo": "*", + "react": "*", + "react-native": "*" + } + }, "node_modules/expo-av": { "version": "15.1.7", "resolved": "https://registry.npmjs.org/expo-av/-/expo-av-15.1.7.tgz", @@ -10199,19 +10180,6 @@ } } }, - "node_modules/iconv-lite": { - "version": "0.4.24", - "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz", - "integrity": "sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==", - "devOptional": true, - "license": "MIT", - "dependencies": { - "safer-buffer": ">= 2.1.2 < 3" - }, - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/ieee754": { "version": "1.2.1", "resolved": "https://registry.npmjs.org/ieee754/-/ieee754-1.2.1.tgz", @@ -12398,17 +12366,6 @@ "lan-network": "dist/lan-network-cli.js" } }, - "node_modules/launch-editor": { - "version": "2.12.0", - "resolved": 
"https://registry.npmjs.org/launch-editor/-/launch-editor-2.12.0.tgz", - "integrity": "sha512-giOHXoOtifjdHqUamwKq6c49GzBdLjvxrd2D+Q4V6uOHopJv7p9VJxikDsQ/CBXZbEITgUqSVHXLTG3VhPP1Dg==", - "devOptional": true, - "license": "MIT", - "dependencies": { - "picocolors": "^1.1.1", - "shell-quote": "^1.8.3" - } - }, "node_modules/leven": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/leven/-/leven-3.1.0.tgz", @@ -12731,9 +12688,9 @@ } }, "node_modules/lodash": { - "version": "4.17.21", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", - "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==", + "version": "4.17.23", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.23.tgz", + "integrity": "sha512-LgVTMpQtIopCi79SJeDiP0TfWi5CNEc/L/aRdTh3yIvmZXTnheWpKjSZhnvMl8iXbC1tFg9gdHHDMLoV7CnG+w==", "license": "MIT" }, "node_modules/lodash.debounce": { @@ -13103,16 +13060,6 @@ "integrity": "sha512-/sKlQJCBYVY9Ers9hqzKou4H6V5UWc/M59TH2dvkt+84itfnq7uFOMLpOiOS4ujvHP4etln18fmIxA5R5fll0g==", "license": "MIT" }, - "node_modules/media-typer": { - "version": "0.3.0", - "resolved": "https://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz", - "integrity": "sha512-dq+qelQ9akHpcOl/gUVRTxVIOkAJ1wR3QAvb4RsVjS8oVoFjDGTc679wJYmUmknUF5HwMLOgb5O+a3KxfWapPQ==", - "devOptional": true, - "license": "MIT", - "engines": { - "node": ">= 0.6" - } - }, "node_modules/memoize-one": { "version": "5.2.1", "resolved": "https://registry.npmjs.org/memoize-one/-/memoize-one-5.2.1.tgz", @@ -13956,9 +13903,9 @@ } }, "node_modules/node-forge": { - "version": "1.3.1", - "resolved": "https://registry.npmjs.org/node-forge/-/node-forge-1.3.1.tgz", - "integrity": "sha512-dPEtOeMvF9VMcYV/1Wb8CPoVAXtp6MKMlcbAt4ddqmGqUJ6fQZFXkNZNkNlfevtNkGtaSoXf/vNNNSvgrdXwtA==", + "version": "1.3.3", + "resolved": "https://registry.npmjs.org/node-forge/-/node-forge-1.3.3.tgz", + "integrity": 
"sha512-rLvcdSyRCyouf6jcOIPe/BgwG/d7hKjzMKOas33/pHEr6gbq18IK9zV7DiPvzsz0oBJPme6qr6H6kGZuI9/DZg==", "license": "(BSD-3-Clause OR GPL-2.0)", "engines": { "node": ">= 6.13.0" @@ -14089,7 +14036,7 @@ "version": "1.13.4", "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.13.4.tgz", "integrity": "sha512-W67iLl4J2EXEGTbfeHCffrjDfitvLANg0UlX3wFUUSTx92KXRFegMHUVgSqE+wvhAbi4WqjGg9czysTV2Epbew==", - "devOptional": true, + "dev": true, "license": "MIT", "engines": { "node": ">= 0.4" @@ -14874,22 +14821,6 @@ "qrcode-terminal": "bin/qrcode-terminal.js" } }, - "node_modules/qs": { - "version": "6.13.0", - "resolved": "https://registry.npmjs.org/qs/-/qs-6.13.0.tgz", - "integrity": "sha512-+38qI9SOr8tfZ4QmJNplMUxqjbe7LKvvZgWdExBOmd+egZTtjLB67Gu0HRX3u/XOq7UU2Nx6nsjvS16Z9uwfpg==", - "devOptional": true, - "license": "BSD-3-Clause", - "dependencies": { - "side-channel": "^1.0.6" - }, - "engines": { - "node": ">=0.6" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, "node_modules/query-string": { "version": "7.1.3", "resolved": "https://registry.npmjs.org/query-string/-/query-string-7.1.3.tgz", @@ -14965,22 +14896,6 @@ "node": ">= 0.6" } }, - "node_modules/raw-body": { - "version": "2.5.2", - "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-2.5.2.tgz", - "integrity": "sha512-8zGqypfENjCIqGhgXToC8aB2r7YrBX+AQAfIPs/Mlk+BtPTztOvTS01NRW/3Eh60J+a48lt8qsCzirQ6loCVfA==", - "devOptional": true, - "license": "MIT", - "dependencies": { - "bytes": "3.1.2", - "http-errors": "2.0.0", - "iconv-lite": "0.4.24", - "unpipe": "1.0.0" - }, - "engines": { - "node": ">= 0.8" - } - }, "node_modules/rc": { "version": "1.2.8", "resolved": "https://registry.npmjs.org/rc/-/rc-1.2.8.tgz", @@ -16167,7 +16082,7 @@ "version": "2.1.2", "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==", - "devOptional": true, + "dev": 
true, "license": "MIT" }, "node_modules/sax": { @@ -16572,7 +16487,7 @@ "version": "1.1.0", "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.1.0.tgz", "integrity": "sha512-ZX99e6tRweoUXqR+VBrslhda51Nh5MTQwou5tnUDgbtyM0dBgmhEDtWGP/xbKn6hqfPRHujUNwz5fy/wbbhnpw==", - "devOptional": true, + "dev": true, "license": "MIT", "dependencies": { "es-errors": "^1.3.0", @@ -16592,7 +16507,7 @@ "version": "1.0.0", "resolved": "https://registry.npmjs.org/side-channel-list/-/side-channel-list-1.0.0.tgz", "integrity": "sha512-FCLHtRD/gnpCiCHEiJLOwdmFP+wzCmDEkc9y7NsYxeF4u7Btsn1ZuwgwJGxImImHicJArLP4R0yX4c2KCrMrTA==", - "devOptional": true, + "dev": true, "license": "MIT", "dependencies": { "es-errors": "^1.3.0", @@ -16609,7 +16524,7 @@ "version": "1.0.1", "resolved": "https://registry.npmjs.org/side-channel-map/-/side-channel-map-1.0.1.tgz", "integrity": "sha512-VCjCNfgMsby3tTdo02nbjtM/ewra6jPHmpThenkTYh8pG9ucZ/1P8So4u4FGBek/BjpOVsDCMoLA/iuBKIFXRA==", - "devOptional": true, + "dev": true, "license": "MIT", "dependencies": { "call-bound": "^1.0.2", @@ -16628,7 +16543,7 @@ "version": "1.0.2", "resolved": "https://registry.npmjs.org/side-channel-weakmap/-/side-channel-weakmap-1.0.2.tgz", "integrity": "sha512-WPS/HvHQTYnHisLo9McqBHOJk2FkHO/tlpvldyrnem4aeQp4hai3gythswg6p01oSoTl58rcpiFAjF2br2Ak2A==", - "devOptional": true, + "dev": true, "license": "MIT", "dependencies": { "call-bound": "^1.0.2", @@ -17271,6 +17186,14 @@ "node": ">= 6" } }, + "node_modules/sudo-prompt": { + "version": "9.2.1", + "resolved": "https://registry.npmjs.org/sudo-prompt/-/sudo-prompt-9.2.1.tgz", + "integrity": "sha512-Mu7R0g4ig9TUuGSxJavny5Rv0egCEtpZRNMrZaYS1vxkiIxGiGUwoezU3LazIQ+KE04hTrTfNPgxU5gzi7F5Pw==", + "deprecated": "Package no longer supported. 
Contact Support at https://www.npmjs.com/support for more info.", + "devOptional": true, + "license": "MIT" + }, "node_modules/supports-color": { "version": "7.2.0", "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", @@ -17331,9 +17254,9 @@ } }, "node_modules/tar": { - "version": "7.5.2", - "resolved": "https://registry.npmjs.org/tar/-/tar-7.5.2.tgz", - "integrity": "sha512-7NyxrTE4Anh8km8iEy7o0QYPs+0JKBTj5ZaqHg6B39erLg0qYXN3BijtShwbsNSvQ+LN75+KV+C4QR/f6Gwnpg==", + "version": "7.5.7", + "resolved": "https://registry.npmjs.org/tar/-/tar-7.5.7.tgz", + "integrity": "sha512-fov56fJiRuThVFXD6o6/Q354S7pnWMJIVlDBYijsTNx6jKSE4pvrDTs6lUnmGvNyfJwFQQwWy3owKz1ucIhveQ==", "license": "BlueOak-1.0.0", "dependencies": { "@isaacs/fs-minipass": "^4.0.0", @@ -17734,20 +17657,6 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/type-is": { - "version": "1.6.18", - "resolved": "https://registry.npmjs.org/type-is/-/type-is-1.6.18.tgz", - "integrity": "sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g==", - "devOptional": true, - "license": "MIT", - "dependencies": { - "media-typer": "0.3.0", - "mime-types": "~2.1.24" - }, - "engines": { - "node": ">= 0.6" - } - }, "node_modules/typed-array-buffer": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/typed-array-buffer/-/typed-array-buffer-1.0.3.tgz", @@ -18010,9 +17919,9 @@ } }, "node_modules/undici": { - "version": "6.22.0", - "resolved": "https://registry.npmjs.org/undici/-/undici-6.22.0.tgz", - "integrity": "sha512-hU/10obOIu62MGYjdskASR3CUAiYaFTtC9Pa6vHyf//mAipSvSQg6od2CnJswq7fvzNS3zJhxoRkgNVaHurWKw==", + "version": "6.23.0", + "resolved": "https://registry.npmjs.org/undici/-/undici-6.23.0.tgz", + "integrity": "sha512-VfQPToRA5FZs/qJxLIinmU59u0r7LXqoJkCzinq3ckNJp3vKEh7jTWN589YQ5+aoAC/TGRLyJLCPKcLQbM8r9g==", "license": "MIT", "engines": { "node": ">=18.17" diff --git a/package.json b/package.json index 3329e34..8554fa1 100644 
--- a/package.json +++ b/package.json @@ -21,6 +21,7 @@ "@expo/vector-icons": "^14.0.2", "@flyerhq/react-native-chat-ui": "^1.4.3", "@openspacelabs/react-native-zoomable-view": "^2.3.1", + "@picovoice/react-native-voice-processor": "^1.2.3", "@react-native-async-storage/async-storage": "^2.2.0", "@react-native-community/datetimepicker": "8.4.1", "@react-native-google-signin/google-signin": "^15.0.0", @@ -36,6 +37,7 @@ "dotenv": "^17.2.1", "eventsource": "^4.0.0", "expo": "~53.0.23", + "expo-audio": "~0.4.9", "expo-av": "^15.1.7", "expo-blur": "~14.1.5", "expo-constants": "~17.1.7", @@ -85,7 +87,7 @@ }, "devDependencies": { "@babel/core": "^7.25.2", - "@react-native-community/cli": "^18.0.0", + "@react-native-community/cli": "^14.0.1", "@types/jest": "^29.5.12", "@types/react": "~19.0.10", "@types/react-test-renderer": "^18.3.0", diff --git a/src/components/BotChat/MessageBubble.tsx b/src/components/BotChat/MessageBubble.tsx index c6f9aad..23d3e42 100644 --- a/src/components/BotChat/MessageBubble.tsx +++ b/src/components/BotChat/MessageBubble.tsx @@ -5,6 +5,7 @@ import TaskListBubble from './TaskListBubble'; // Nuovo componente card-based import TaskTableBubble from './TaskTableBubble'; // Mantieni per backward compatibility import Markdown from 'react-native-markdown-display'; // Supporto per Markdown import WidgetBubble from './widgets/WidgetBubble'; +import InlineVisualizationWidget from './widgets/InlineVisualizationWidget'; import VisualizationModal from './widgets/VisualizationModal'; import ItemDetailModal from './widgets/ItemDetailModal'; import TaskEditModal from '../Task/TaskEditModal'; @@ -13,7 +14,7 @@ import CategoryMenu from '../Category/CategoryMenu'; import { Task as TaskType } from '../../services/taskService'; import { updateTask, updateCategory, deleteCategory } from '../../services/taskService'; -const MessageBubble: React.FC = ({ message, style }) => { +const MessageBubble: React.FC = ({ message, style, isVoiceChat = false }) => { const isBot = 
message.sender === 'bot'; const fadeAnim = useRef(new Animated.Value(0)).current; const slideAnim = useRef(new Animated.Value(20)).current; @@ -362,38 +363,55 @@ const MessageBubble: React.FC = ({ message, style }) => { {/* WIDGETS SOPRA AL MESSAGGIO (come richiesto dall'utente) */} {isBot && message.toolWidgets && message.toolWidgets.length > 0 && ( - {message.toolWidgets.map((widget) => ( - - ))} + {message.toolWidgets.map((widget) => { + // In voice chat usa InlineVisualizationWidget + if (isVoiceChat) { + return ( + + ); + } + + // In text chat usa WidgetBubble (comportamento attuale) + return ( + + ); + })} )} - {/* BUBBLE DEL MESSAGGIO */} - - {renderMessageContent()} - {message.isStreaming && isBot && ( - - - - - - )} - {message.modelType && isBot && !message.isStreaming && ( - - {message.modelType === 'advanced' ? 'Modello avanzato' : 'Modello base'} - - )} - + {/* BUBBLE DEL MESSAGGIO - renderizza solo se c'è testo */} + {message.text && message.text.trim() !== '' && ( + + {renderMessageContent()} + {message.isStreaming && isBot && ( + + + + + + )} + {message.modelType && isBot && !message.isStreaming && ( + + {message.modelType === 'advanced' ? 
'Modello avanzato' : 'Modello base'} + + )} + + )} = ({ visible, onClose, - isRecording: externalIsRecording = false, onVoiceResponse, }) => { const { @@ -37,22 +44,150 @@ const VoiceChatModal: React.FC = ({ isRecording, isProcessing, isSpeaking, - isSpeechActive, + transcripts, + activeTools, + isMuted, connect, disconnect, - stopRecording, - cancelRecording, - stopPlayback, - sendControl, requestPermissions, + mute, + unmute, } = useVoiceChat(); + // Calendar state + const [selectedDate, setSelectedDate] = useState(dayjs().format('YYYY-MM-DD')); + const [calendarTasks, setCalendarTasks] = useState([]); + const cacheService = useRef(TaskCacheService.getInstance()).current; + + // Carica task per il calendario + const fetchCalendarTasks = useCallback(async () => { + try { + const cachedTasks = await cacheService.getCachedTasks(); + if (cachedTasks.length > 0) { + const incomplete = cachedTasks.filter(t => { + const s = t.status?.toLowerCase() || ''; + return s !== 'completato' && s !== 'completed' && s !== 'archiviato' && s !== 'archived'; + }); + setCalendarTasks(incomplete); + return; + } + const tasksData = await getAllTasks(true); + if (Array.isArray(tasksData)) { + const incomplete = tasksData.filter(t => { + const s = t.status?.toLowerCase() || ''; + return s !== 'completato' && s !== 'completed' && s !== 'archiviato' && s !== 'archived'; + }); + setCalendarTasks(incomplete); + } + } catch (error) { + console.error('[VoiceChatModal] Errore caricamento task calendario:', error); + } + }, [cacheService]); + + // Carica task quando il modal si apre + useEffect(() => { + if (visible) { + fetchCalendarTasks(); + } + }, [visible, fetchCalendarTasks]); + + // Ascolta eventi task per aggiornare il calendario + useEffect(() => { + const refresh = () => fetchCalendarTasks(); + eventEmitter.on(EVENTS.TASK_ADDED, refresh); + eventEmitter.on(EVENTS.TASK_UPDATED, refresh); + eventEmitter.on(EVENTS.TASK_DELETED, refresh); + return () => { + 
eventEmitter.off(EVENTS.TASK_ADDED, refresh); + eventEmitter.off(EVENTS.TASK_UPDATED, refresh); + eventEmitter.off(EVENTS.TASK_DELETED, refresh); + }; + }, [fetchCalendarTasks]); + + const goToPreviousMonth = () => { + setSelectedDate(prev => dayjs(prev).subtract(1, 'month').format('YYYY-MM-DD')); + }; + + const goToNextMonth = () => { + setSelectedDate(prev => dayjs(prev).add(1, 'month').format('YYYY-MM-DD')); + }; + + const selectDate = (date: string | null) => { + if (date) setSelectedDate(date); + }; + + // Task per la data selezionata (normalizzati con id/task_id) + const tasksForSelectedDate = calendarTasks.filter(task => { + if (!task.end_time) return false; + return dayjs(task.end_time).format('YYYY-MM-DD') === selectedDate; + }).map(task => { + if (!task.id && task.task_id) return { ...task, id: task.task_id }; + if (task.id && !task.task_id) return { ...task, task_id: task.id }; + return task; + }); + + // Task handlers + const handleTaskComplete = async (taskId: number | string) => { + try { + await completeTask(taskId); + fetchCalendarTasks(); + } catch (error) { + console.error('[VoiceChatModal] Errore completamento task:', error); + Alert.alert('Errore', 'Impossibile completare il task.'); + } + }; + + const handleTaskUncomplete = async (taskId: number | string) => { + try { + await disCompleteTask(taskId); + fetchCalendarTasks(); + } catch (error) { + console.error('[VoiceChatModal] Errore annullamento completamento:', error); + Alert.alert('Errore', 'Impossibile riaprire il task.'); + } + }; + + const handleTaskEdit = async (taskId: number | string, updatedTask: TaskType) => { + try { + await updateTask(taskId, updatedTask); + fetchCalendarTasks(); + } catch (error) { + console.error('[VoiceChatModal] Errore modifica task:', error); + Alert.alert('Errore', 'Impossibile modificare il task.'); + } + }; + + const handleTaskDelete = async (taskId: number | string) => { + try { + await deleteTask(taskId); + fetchCalendarTasks(); + } catch (error) { + 
console.error('[VoiceChatModal] Errore eliminazione task:', error); + Alert.alert('Errore', 'Impossibile eliminare il task.'); + } + }; + // Animazioni - const pulseScale = useRef(new Animated.Value(1)).current; - const pulseOpacity = useRef(new Animated.Value(0.3)).current; const slideIn = useRef(new Animated.Value(height)).current; const fadeIn = useRef(new Animated.Value(0)).current; - const recordingScale = useRef(new Animated.Value(1)).current; + const liveDotOpacity = useRef(new Animated.Value(1)).current; + + // Animated loading dots for smooth state transitions + const dot1Opacity = useRef(new Animated.Value(0.3)).current; + const dot2Opacity = useRef(new Animated.Value(0.3)).current; + const dot3Opacity = useRef(new Animated.Value(0.3)).current; + const stateTextOpacity = useRef(new Animated.Value(1)).current; + const prevStateRef = useRef(state); + + // Notifica trascrizioni assistant al parent + useEffect(() => { + if (onVoiceResponse && transcripts.length > 0) { + const last = transcripts[transcripts.length - 1]; + if (last.role === 'assistant') { + onVoiceResponse(last.content); + } + } + }, [transcripts, onVoiceResponse]); // Animazione di entrata del modal useEffect(() => { @@ -85,76 +220,75 @@ const VoiceChatModal: React.FC = ({ // Cleanup quando il modal si chiude useEffect(() => { if (!visible) { - if (isRecording) cancelRecording(); - if (isSpeaking) stopPlayback(); + // Usa disconnect che gestisce internamente il cleanup di registrazione e player disconnect(); } - }, [visible]); + }, [visible, disconnect]); - // Animazione del cerchio pulsante - solo quando in ascolto + // Loading dots sequential pulse animation + const isLoadingState = state === 'connecting' || state === 'authenticating' || state === 'setting_up' || state === 'processing'; useEffect(() => { - const shouldAnimate = isRecording && isSpeechActive; - - if (shouldAnimate) { - const pulseAnimation = Animated.loop( - Animated.sequence([ - Animated.parallel([ - 
Animated.timing(pulseScale, { - toValue: 1.15, - duration: 800, - useNativeDriver: true, - }), - Animated.timing(pulseOpacity, { - toValue: 0, - duration: 800, - useNativeDriver: true, - }), + if (isLoadingState) { + const animateDots = Animated.loop( + Animated.stagger(200, [ + Animated.sequence([ + Animated.timing(dot1Opacity, { toValue: 1, duration: 400, useNativeDriver: true }), + Animated.timing(dot1Opacity, { toValue: 0.3, duration: 400, useNativeDriver: true }), + ]), + Animated.sequence([ + Animated.timing(dot2Opacity, { toValue: 1, duration: 400, useNativeDriver: true }), + Animated.timing(dot2Opacity, { toValue: 0.3, duration: 400, useNativeDriver: true }), ]), - Animated.parallel([ - Animated.timing(pulseScale, { - toValue: 1, - duration: 800, - useNativeDriver: true, - }), - Animated.timing(pulseOpacity, { - toValue: 0.4, - duration: 800, - useNativeDriver: true, - }), + Animated.sequence([ + Animated.timing(dot3Opacity, { toValue: 1, duration: 400, useNativeDriver: true }), + Animated.timing(dot3Opacity, { toValue: 0.3, duration: 400, useNativeDriver: true }), ]), ]) ); - pulseAnimation.start(); + animateDots.start(); + return () => animateDots.stop(); + } else { + dot1Opacity.setValue(0.3); + dot2Opacity.setValue(0.3); + dot3Opacity.setValue(0.3); + } + }, [isLoadingState, dot1Opacity, dot2Opacity, dot3Opacity]); - return () => pulseAnimation.stop(); + // Smooth cross-fade when state changes + useEffect(() => { + if (prevStateRef.current !== state) { + prevStateRef.current = state; + stateTextOpacity.setValue(0); + Animated.timing(stateTextOpacity, { + toValue: 1, + duration: 250, + useNativeDriver: true, + }).start(); } - }, [isRecording, isSpeechActive, pulseScale, pulseOpacity]); + }, [state, stateTextOpacity]); - // Animazione durante elaborazione/risposta + // Live dot pulse animation useEffect(() => { - if (isProcessing || isSpeaking) { - const thinkingAnimation = Animated.loop( + if (isConnected) { + const dotPulse = Animated.loop( 
Animated.sequence([ - Animated.timing(recordingScale, { - toValue: 1.08, + Animated.timing(liveDotOpacity, { + toValue: 0.3, duration: 1000, useNativeDriver: true, }), - Animated.timing(recordingScale, { + Animated.timing(liveDotOpacity, { toValue: 1, duration: 1000, useNativeDriver: true, }), ]) ); - thinkingAnimation.start(); + dotPulse.start(); - return () => { - thinkingAnimation.stop(); - recordingScale.setValue(1); - }; + return () => dotPulse.stop(); } - }, [isProcessing, isSpeaking, recordingScale]); + }, [isConnected, liveDotOpacity]); // Gestione connessione const handleConnect = async () => { @@ -172,11 +306,8 @@ const VoiceChatModal: React.FC = ({ await connect(); }; - const handleClose = () => { - if (isRecording) cancelRecording(); - if (isSpeaking) stopPlayback(); - disconnect(); - + const handleClose = async () => { + // Avvia l'animazione di chiusura Animated.parallel([ Animated.timing(slideIn, { toValue: height, @@ -191,129 +322,55 @@ const VoiceChatModal: React.FC = ({ ]).start(() => { onClose(); }); - }; - - const handleErrorDismiss = () => { - if (state === 'error') { - handleConnect(); - } - }; - - // Render dello stato - versione minimale - const renderStateIndicator = () => { - if (state === 'connecting') { - return Connessione in corso...; - } - if (state === 'error') { - return Qualcosa è andato storto; - } - - if (isRecording && isSpeechActive) { - return Ti ascolto...; - } - - if (isProcessing || isSpeaking) { - return Sto pensando...; - } - - if (isConnected && !isRecording) { - return Parla quando vuoi; - } - - return null; + // Esegui il cleanup in parallelo all'animazione + // disconnect gestisce internamente il cleanup di registrazione e player + await disconnect(); }; - // Render del pulsante principale - versione minimale - const renderMainButton = () => { - // Stato: elaborazione o risposta in corso - if (isProcessing || isSpeaking) { - return ( - - - - ); - } - // Stato: connessione - if (state === 'connecting') { - return ( - - 
- - ); - } + // Render loading dots + const renderLoadingDots = () => ( + + + + + + ); - // Stato: errore - if (state === 'error') { - return ( - - - - - - ); + // Render dello stato con transizioni fluide + const renderStateIndicator = () => { + // Stati di caricamento: mostra solo i dots animati + if (isLoadingState) { + return renderLoadingDots(); } - // Stato: non connesso - if (!isConnected) { - return ( - - - - - - ); + // Stati interattivi: mostra testo con fade-in + let label: string | null = null; + switch (state) { + case 'error': + label = 'Qualcosa è andato storto'; + break; + case 'recording': + label = 'Ti ascolto...'; + break; + case 'speaking': + label = 'Rispondo...'; + break; + case 'ready': + label = 'Parla quando vuoi'; + break; + default: + return null; } - // Stato: ascolto attivo con animazione semplice - const isListening = isRecording && isSpeechActive; - return ( - - - + + {label} + ); }; - // Render pulsante di stop durante elaborazione/risposta - const renderStopButton = () => { - if (!isProcessing && !isSpeaking) { - return null; - } - - return ( - { - if (isSpeaking) stopPlayback(); - if (isProcessing) sendControl('cancel'); - }} - activeOpacity={0.7} - > - Interrompi - - ); - }; return ( = ({ statusBarTranslucent={true} onRequestClose={handleClose} > - + = ({ }, ]} > - {/* Header */} + {/* Header with Live indicator and Close button */} + {/* Live Indicator */} + {isConnected && ( + + + Live + + )} + + {/* Status Badge */} + + {renderStateIndicator()} + {isMuted && isConnected && ( + + + Muto + + )} + + + {/* Close Button */} - + - {/* Contenuto principale */} - - {/* Titolo minimale */} - Assistente Vocale - - {/* Messaggio di stato semplice */} - {renderStateIndicator()} - - {/* Cerchio animato centrale */} - - {/* Cerchi di pulsazione - solo quando in ascolto */} - {(isRecording && isSpeechActive) && ( - <> - - - - )} - - {/* Pulsante principale */} - {renderMainButton()} + {/* Calendar + Task List Section */} + + {/* Calendario in 
alto */} + + - {/* Pulsante stop durante elaborazione */} - {renderStopButton()} + {/* Header data selezionata */} + + + {dayjs(selectedDate).format('DD MMMM YYYY')} + + + {tasksForSelectedDate.length} {tasksForSelectedDate.length === 1 ? 'impegno' : 'impegni'} + + - {/* Messaggio di errore minimalista */} - {error && ( - - {error} - - Riprova - - - )} + {/* Lista task scrollabile */} + + {tasksForSelectedDate.length > 0 ? ( + tasksForSelectedDate.map(task => ( + + )) + ) : ( + + + Nessun impegno per questa data + + )} + - {/* Footer con istruzioni semplici */} - - - Parla naturalmente - + {/* Bottom Control Bar */} + + {/* Microphone Button - Primary */} + { + if (isMuted) { + unmute(); + } else { + mute(); + } + }} + disabled={!isConnected || state === 'connecting' || isProcessing || isSpeaking} + activeOpacity={0.8} + accessibilityRole="button" + accessibilityLabel={isMuted ? "Microfono disattivato" : "Microfono attivo"} + accessibilityState={{ selected: !isMuted }} + > + + @@ -413,137 +516,209 @@ const VoiceChatModal: React.FC = ({ const styles = StyleSheet.create({ overlay: { flex: 1, - backgroundColor: "rgba(0, 0, 0, 0.95)", + backgroundColor: "#FFFFFF", justifyContent: "space-between", }, header: { paddingTop: StatusBar.currentHeight ? 
StatusBar.currentHeight + 16 : 44, paddingHorizontal: 20, - paddingBottom: 16, - alignItems: "flex-end", - }, - closeButton: { - padding: 10, - borderRadius: 24, - backgroundColor: "rgba(255, 255, 255, 0.06)", + paddingBottom: 12, + flexDirection: "row", + justifyContent: "space-between", + alignItems: "center", + borderBottomWidth: 1, + borderBottomColor: "#E1E5E9", }, - content: { + headerCenter: { flex: 1, + flexDirection: "row", justifyContent: "center", alignItems: "center", - paddingHorizontal: 32, + gap: 8, + }, + liveIndicator: { + flexDirection: "row", + alignItems: "center", + backgroundColor: "rgba(52, 199, 89, 0.1)", + paddingHorizontal: 12, + paddingVertical: 6, + borderRadius: 16, + }, + liveDot: { + width: 8, + height: 8, + borderRadius: 4, + backgroundColor: "#34C759", + marginRight: 6, }, - title: { - fontSize: 26, - fontWeight: "200", - color: "#ffffff", - textAlign: "center", - marginBottom: 64, + liveText: { + fontSize: 13, + fontWeight: "600", + color: "#34C759", fontFamily: "System", - letterSpacing: 0.8, + }, + closeButton: { + width: 44, + height: 44, + borderRadius: 22, + backgroundColor: "#F8F9FA", + justifyContent: "center", + alignItems: "center", + borderWidth: 1, + borderColor: "#E1E5E9", }, subtleText: { - fontSize: 15, - fontWeight: "300", - color: "rgba(255, 255, 255, 0.5)", - textAlign: "center", - marginBottom: 52, + fontSize: 13, + fontWeight: "400", + color: "#666666", fontFamily: "System", }, - microphoneContainer: { - position: "relative", + loadingDots: { + flexDirection: "row", alignItems: "center", justifyContent: "center", - marginVertical: 48, - }, - pulseCircle: { - position: "absolute", - borderRadius: 150, - borderWidth: 1, - borderColor: "rgba(76, 175, 80, 0.4)", + gap: 6, + height: 18, }, - pulseCircle1: { - width: 240, - height: 240, + loadingDot: { + width: 6, + height: 6, + borderRadius: 3, + backgroundColor: "#999999", }, - pulseCircle2: { - width: 300, - height: 300, - }, - microphoneCircle: { - width: 160, - 
height: 160, - borderRadius: 80, - backgroundColor: "rgba(255, 255, 255, 0.08)", - justifyContent: "center", + mutedBadge: { + flexDirection: "row", alignItems: "center", - borderWidth: 1.5, - borderColor: "rgba(255, 255, 255, 0.15)", + backgroundColor: "rgba(255, 59, 48, 0.1)", + paddingHorizontal: 10, + paddingVertical: 4, + borderRadius: 12, + gap: 4, }, - listeningCircle: { - backgroundColor: "rgba(76, 175, 80, 0.15)", - borderColor: "rgba(76, 175, 80, 0.3)", + mutedBadgeText: { + fontSize: 11, + fontWeight: "600", + color: "#FF3B30", + fontFamily: "System", }, - thinkingCircle: { - backgroundColor: "rgba(33, 150, 243, 0.15)", - borderColor: "rgba(33, 150, 243, 0.3)", + // Calendar + Task List Section + calendarSection: { + flex: 1, + paddingHorizontal: 20, }, - microphoneButton: { - width: "100%", - height: "100%", - justifyContent: "center", - alignItems: "center", - borderRadius: 80, + calendarWrapper: { + paddingTop: 12, }, - footer: { - paddingHorizontal: 40, - paddingBottom: 64, + selectedDateHeader: { + flexDirection: "row", + justifyContent: "space-between", alignItems: "center", + paddingTop: 16, + paddingBottom: 12, + borderBottomWidth: 1, + borderBottomColor: "#E1E5E9", }, - footerText: { - fontSize: 12, + selectedDateTitle: { + fontSize: 16, fontWeight: "300", - color: "rgba(255, 255, 255, 0.35)", - textAlign: "center", + color: "#000000", fontFamily: "System", - letterSpacing: 0.3, - }, - stopButton: { - paddingHorizontal: 28, - paddingVertical: 14, - backgroundColor: "rgba(255, 255, 255, 0.08)", - borderRadius: 28, - marginTop: 40, + letterSpacing: -0.5, }, - stopButtonText: { - fontSize: 14, + taskCountLabel: { + fontSize: 13, fontWeight: "400", - color: "rgba(255, 255, 255, 0.75)", + color: "#999999", fontFamily: "System", - letterSpacing: 0.2, }, - errorContainer: { + taskListScroll: { + flex: 1, + }, + taskListContent: { + paddingTop: 4, + paddingBottom: 16, + paddingHorizontal: 4, + }, + emptyTaskList: { alignItems: "center", - 
paddingHorizontal: 28, - paddingVertical: 18, - backgroundColor: "rgba(244, 67, 54, 0.08)", - borderRadius: 20, - marginTop: 40, - maxWidth: "85%", + justifyContent: "center", + paddingTop: 40, }, - errorText: { - color: "rgba(255, 107, 107, 0.85)", - fontSize: 13, + emptyTaskText: { + fontSize: 14, fontWeight: "300", - textAlign: "center", - marginBottom: 14, + color: "#999999", fontFamily: "System", + marginTop: 10, }, - retryText: { - color: "rgba(255, 255, 255, 0.65)", - fontSize: 13, - fontWeight: "400", - fontFamily: "System", + // Control Bar + controlBar: { + flexDirection: "row", + justifyContent: "center", + alignItems: "center", + gap: 40, + paddingHorizontal: 32, + paddingVertical: 24, + paddingBottom: 40, + backgroundColor: "#FFFFFF", + borderTopWidth: 1, + borderTopColor: "#E1E5E9", + }, + controlButton: { + width: 56, + height: 56, + borderRadius: 28, + backgroundColor: "#F8F9FA", + justifyContent: "center", + alignItems: "center", + borderWidth: 1, + borderColor: "#E1E5E9", + }, + controlButtonDisabled: { + backgroundColor: "#F8F9FA", + opacity: 0.5, + }, + controlButtonActive: { + backgroundColor: "#E1E5E9", + }, + controlButtonPrimary: { + width: 64, + height: 64, + borderRadius: 32, + backgroundColor: "#000000", + borderWidth: 0, + ...Platform.select({ + ios: { + shadowColor: "#000000", + shadowOffset: { width: 0, height: 4 }, + shadowOpacity: 0.3, + shadowRadius: 8, + }, + android: { + elevation: 8, + }, + }), + }, + controlButtonRecording: { + backgroundColor: "#000000", + }, + controlButtonMuted: { + backgroundColor: "#E1E5E9", + }, + controlButtonEnd: { + backgroundColor: "#000000", + borderWidth: 0, + ...Platform.select({ + ios: { + shadowColor: "#000000", + shadowOffset: { width: 0, height: 4 }, + shadowOpacity: 0.3, + shadowRadius: 8, + }, + android: { + elevation: 6, + }, + }), }, }); diff --git a/src/components/BotChat/types.ts b/src/components/BotChat/types.ts index 86fb99f..0016ce4 100644 --- a/src/components/BotChat/types.ts +++ 
b/src/components/BotChat/types.ts @@ -47,6 +47,7 @@ export interface ChatSession { export interface MessageBubbleProps { message: Message; style?: StyleProp; + isVoiceChat?: boolean; // Flag per distinguere voice chat da text chat } // Props per il componente ChatInput @@ -90,9 +91,12 @@ export interface ToolWidget { // Output parsato dal tool MCP export interface ToolOutputData { type?: 'task_created' | 'category_created' | 'note_created' | - 'task_list' | 'category_list' | 'note_list'; + 'task_list' | 'category_list' | 'note_list' | 'text'; success?: boolean; message?: string; + text?: string; // Contenuto JSON stringificato (per voice chat MCP tools) + annotations?: any; + meta?: any; // Dati per tool di creazione task?: { diff --git a/src/components/BotChat/widgets/ErrorWidgetCard.tsx b/src/components/BotChat/widgets/ErrorWidgetCard.tsx new file mode 100644 index 0000000..a411d6b --- /dev/null +++ b/src/components/BotChat/widgets/ErrorWidgetCard.tsx @@ -0,0 +1,69 @@ +import React from 'react'; +import { View, Text, StyleSheet } from 'react-native'; +import { Ionicons } from '@expo/vector-icons'; +import { ToolWidget } from '../types'; + +interface ErrorWidgetCardProps { + widget: ToolWidget; +} + +/** + * Error card per tool widgets falliti in voice chat + * Mostra messaggio di errore con icona rossa + */ +const ErrorWidgetCard: React.FC = React.memo(({ widget }) => { + const errorMessage = widget.errorMessage || 'Errore durante l\'esecuzione'; + + // Determina il messaggio specifico in base al tool + let specificMessage = errorMessage; + if (widget.toolName === 'show_tasks_to_user') { + specificMessage = 'Impossibile recuperare le task'; + } else if (widget.toolName === 'show_categories_to_user') { + specificMessage = 'Impossibile recuperare le categorie'; + } + + return ( + + + + + + {specificMessage} + {widget.errorMessage && widget.errorMessage !== specificMessage && ( + {widget.errorMessage} + )} + + + ); +}); + +const styles = StyleSheet.create({ + 
container: { + flexDirection: 'row', + alignItems: 'center', + backgroundColor: '#FFE5E5', + borderRadius: 12, + padding: 12, + marginVertical: 8, + borderWidth: 1, + borderColor: '#FFCCCC', + }, + iconContainer: { + marginRight: 12, + }, + textContainer: { + flex: 1, + }, + errorTitle: { + fontSize: 14, + fontWeight: '600', + color: '#FF3B30', + marginBottom: 2, + }, + errorDetail: { + fontSize: 12, + color: '#CC0000', + }, +}); + +export default ErrorWidgetCard; diff --git a/src/components/BotChat/widgets/InlineCategoryList.tsx b/src/components/BotChat/widgets/InlineCategoryList.tsx new file mode 100644 index 0000000..5fc4fb1 --- /dev/null +++ b/src/components/BotChat/widgets/InlineCategoryList.tsx @@ -0,0 +1,143 @@ +import React, { useMemo } from 'react'; +import { View, Text, TouchableOpacity, StyleSheet } from 'react-native'; +import { Ionicons } from '@expo/vector-icons'; +import { ToolWidget, CategoryListItem } from '../types'; + +interface InlineCategoryListProps { + widget: ToolWidget; + onCategoryPress?: (category: CategoryListItem) => void; +} + +/** + * Lista completa di categorie inline per voice chat + * Mostra tutte le categorie come card semplificate + */ +const InlineCategoryList: React.FC = React.memo(({ widget, onCategoryPress }) => { + // Nessun output disponibile + if (!widget.toolOutput) { + return null; + } + + // Parse doppio: se toolOutput.text esiste, è una stringa JSON con i dati veri + let parsedData = widget.toolOutput; + if (widget.toolOutput.type === 'text' && widget.toolOutput.text) { + try { + parsedData = JSON.parse(widget.toolOutput.text); + } catch (e) { + console.error('[InlineCategoryList] Error parsing text field:', e); + } + } + + // Gestisci sia formato diretto che formato con type wrapper + let categories: CategoryListItem[] = []; + + if (parsedData.type === 'category_list' && parsedData.categories) { + // Formato con type wrapper (come text chat) + categories = parsedData.categories; + } else if (parsedData.categories) { + 
// Formato diretto + categories = parsedData.categories; + } + + // Lista vuota + if (categories.length === 0) { + return ( + + Nessuna categoria trovata + + ); + } + + return ( + + {categories.map((category) => { + const taskCount = category.taskCount || category.task_count || 0; + const categoryColor = category.color || '#666666'; + + return ( + onCategoryPress?.(category)} + activeOpacity={0.7} + > + {/* Color badge */} + + + {/* Category info */} + + + {category.name} + + + {taskCount} {taskCount === 1 ? 'task' : 'task'} + + + + {/* Shared badge */} + {category.isShared && ( + + + + )} + + {/* Arrow icon */} + + + ); + })} + + ); +}); + +const styles = StyleSheet.create({ + container: { + gap: 8, + marginVertical: 4, + }, + emptyContainer: { + backgroundColor: '#F5F5F5', + borderRadius: 12, + padding: 16, + alignItems: 'center', + marginVertical: 4, + }, + emptyText: { + fontSize: 14, + color: '#666666', + fontStyle: 'italic', + }, + categoryCard: { + flexDirection: 'row', + alignItems: 'center', + backgroundColor: '#FFFFFF', + borderRadius: 12, + padding: 12, + borderWidth: 1, + borderColor: '#E0E0E0', + gap: 12, + }, + colorBadge: { + width: 4, + height: 40, + borderRadius: 2, + }, + categoryInfo: { + flex: 1, + }, + categoryName: { + fontSize: 15, + fontWeight: '600', + color: '#000000', + marginBottom: 4, + }, + taskCount: { + fontSize: 13, + color: '#666666', + }, + sharedBadge: { + marginRight: 4, + }, +}); + +export default InlineCategoryList; diff --git a/src/components/BotChat/widgets/InlineTaskPreview.tsx b/src/components/BotChat/widgets/InlineTaskPreview.tsx new file mode 100644 index 0000000..da2ccb3 --- /dev/null +++ b/src/components/BotChat/widgets/InlineTaskPreview.tsx @@ -0,0 +1,128 @@ +import React from 'react'; +import { View, Text, StyleSheet } from 'react-native'; +import { ToolWidget, TaskListItem } from '../types'; +import { Task } from '../../../services/taskService'; +import TaskCard from '../../Task/TaskCard'; + +interface 
InlineTaskPreviewProps { + widget: ToolWidget; + onTaskPress?: (task: Task) => void; +} + +/** + * Preview inline di max 3 task per voice chat + * Mostra task cards con testo "+ N altre task" se ce ne sono di più + */ +const InlineTaskPreview: React.FC = React.memo(({ widget, onTaskPress }) => { + // Nessun output disponibile + if (!widget.toolOutput) { + return null; + } + + // Parse doppio: se toolOutput.text esiste, è una stringa JSON con i dati veri + let parsedData = widget.toolOutput; + if (widget.toolOutput.type === 'text' && widget.toolOutput.text) { + try { + parsedData = JSON.parse(widget.toolOutput.text); + } catch (e) { + console.error('[InlineTaskPreview] Error parsing text field:', e); + } + } + + // Gestisci sia formato diretto che formato con type wrapper + let tasks: TaskListItem[] = []; + + if (parsedData.type === 'task_list' && parsedData.tasks) { + // Formato con type wrapper (come text chat) + tasks = parsedData.tasks; + } else if (parsedData.tasks) { + // Formato diretto + tasks = parsedData.tasks; + } + + // Lista vuota + if (tasks.length === 0) { + return ( + + Nessuna task trovata + + ); + } + + // Converti TaskListItem → Task per TaskCard + const convertTaskListItemToTask = (item: TaskListItem): Task => { + return { + id: item.id, + title: item.title, + description: '', // TaskListItem non ha description + status: item.status || 'In corso', + priority: item.priority || 'Media', + category_id: undefined, + category_name: item.category || item.category_name, + start_time: item.end_time || item.endTimeFormatted, + end_time: item.end_time || item.endTimeFormatted, + created_at: new Date().toISOString(), + updated_at: new Date().toISOString(), + user_id: 0, + }; + }; + + const previewTasks = tasks.slice(0, 3); + const remainingCount = tasks.length - 3; + + return ( + + {previewTasks.map((taskItem) => { + const task = convertTaskListItemToTask(taskItem); + return ( + + ); + })} + + {remainingCount > 0 && ( + + + + {remainingCount} 
{remainingCount === 1 ? 'altra task' : 'altre task'} + + + )} + + ); +}); + +const styles = StyleSheet.create({ + container: { + gap: 8, + marginVertical: 4, + }, + emptyContainer: { + backgroundColor: '#F5F5F5', + borderRadius: 12, + padding: 16, + alignItems: 'center', + marginVertical: 4, + }, + emptyText: { + fontSize: 14, + color: '#666666', + fontStyle: 'italic', + }, + moreTasksContainer: { + backgroundColor: '#F0F0F0', + borderRadius: 8, + padding: 12, + alignItems: 'center', + marginTop: 4, + }, + moreTasksText: { + fontSize: 13, + color: '#666666', + fontWeight: '500', + }, +}); + +export default InlineTaskPreview; diff --git a/src/components/BotChat/widgets/InlineVisualizationWidget.tsx b/src/components/BotChat/widgets/InlineVisualizationWidget.tsx new file mode 100644 index 0000000..dc76bca --- /dev/null +++ b/src/components/BotChat/widgets/InlineVisualizationWidget.tsx @@ -0,0 +1,47 @@ +import React from 'react'; +import { ToolWidget, TaskListItem, CategoryListItem } from '../types'; +import { Task } from '../../../services/taskService'; +import LoadingSkeletonCard from './LoadingSkeletonCard'; +import ErrorWidgetCard from './ErrorWidgetCard'; +import InlineTaskPreview from './InlineTaskPreview'; +import InlineCategoryList from './InlineCategoryList'; + +interface InlineVisualizationWidgetProps { + widget: ToolWidget; + onTaskPress?: (task: Task) => void; + onCategoryPress?: (category: CategoryListItem) => void; +} + +/** + * Router per widget inline in voice chat + * Decide quale componente renderizzare in base allo stato del widget + */ +const InlineVisualizationWidget: React.FC = React.memo(({ + widget, + onTaskPress, + onCategoryPress, +}) => { + // Loading state + if (widget.status === 'loading' && !widget.toolOutput) { + return ; + } + + // Error state + if (widget.status === 'error') { + return ; + } + + // Success state - routing per tipo di tool + if (widget.toolName === 'show_tasks_to_user') { + return ; + } + + if (widget.toolName === 
'show_categories_to_user') { + return ; + } + + // Tool non supportato per inline rendering + return null; +}); + +export default InlineVisualizationWidget; diff --git a/src/components/BotChat/widgets/LoadingSkeletonCard.tsx b/src/components/BotChat/widgets/LoadingSkeletonCard.tsx new file mode 100644 index 0000000..4630119 --- /dev/null +++ b/src/components/BotChat/widgets/LoadingSkeletonCard.tsx @@ -0,0 +1,192 @@ +import React, { useEffect, useRef } from 'react'; +import { View, Text, ActivityIndicator, Animated, StyleSheet } from 'react-native'; +import { Ionicons } from '@expo/vector-icons'; +import { ToolWidget } from '../types'; + +interface LoadingSkeletonCardProps { + widget: ToolWidget; +} + +/** + * Loading skeleton card per tool widgets in voice chat + * Mostra animazioni pulse + shimmer mentre il tool è in esecuzione + */ +const LoadingSkeletonCard: React.FC = React.memo(({ widget }) => { + const pulseAnim = useRef(new Animated.Value(0.3)).current; + const shimmerAnim = useRef(new Animated.Value(-1)).current; + + useEffect(() => { + // Animazione di pulsazione + const pulseAnimation = Animated.loop( + Animated.sequence([ + Animated.timing(pulseAnim, { + toValue: 1, + duration: 1000, + useNativeDriver: true, + }), + Animated.timing(pulseAnim, { + toValue: 0.3, + duration: 1000, + useNativeDriver: true, + }), + ]) + ); + + // Animazione shimmer + const shimmerAnimation = Animated.loop( + Animated.timing(shimmerAnim, { + toValue: 1, + duration: 1500, + useNativeDriver: true, + }) + ); + + pulseAnimation.start(); + shimmerAnimation.start(); + + return () => { + pulseAnimation.stop(); + shimmerAnimation.stop(); + }; + }, [pulseAnim, shimmerAnim]); + + // Determina il tipo di contenuto in base al tool name + let loadingText = 'Caricamento dati...'; + let icon: keyof typeof Ionicons.glyphMap = 'list'; + let skeletonCount = 2; + + if (widget.toolName === 'show_tasks_to_user') { + loadingText = 'Recupero task dal server...'; + icon = 'calendar-outline'; + 
skeletonCount = 3; + } else if (widget.toolName === 'show_categories_to_user') { + loadingText = 'Recupero categorie dal server...'; + icon = 'folder-outline'; + skeletonCount = 3; + } + + const shimmerTranslate = shimmerAnim.interpolate({ + inputRange: [-1, 1], + outputRange: [-200, 200], + }); + + return ( + + {/* Header con icona e testo */} + + + + + + {loadingText} + + + + + {/* Skeleton cards */} + + {Array.from({ length: skeletonCount }).map((_, i) => ( + + + + + + + + + + + + ))} + + + ); +}); + +const styles = StyleSheet.create({ + container: { + marginVertical: 8, + }, + header: { + flexDirection: 'row', + alignItems: 'center', + marginBottom: 12, + }, + iconContainer: { + width: 36, + height: 36, + borderRadius: 18, + backgroundColor: '#F0F0F0', + justifyContent: 'center', + alignItems: 'center', + marginRight: 12, + }, + textContainer: { + flex: 1, + flexDirection: 'row', + alignItems: 'center', + }, + loadingText: { + fontSize: 14, + color: '#666666', + fontWeight: '500', + marginRight: 8, + }, + spinner: { + marginLeft: 4, + }, + skeletonContainer: { + gap: 8, + }, + skeletonCard: { + backgroundColor: '#FFFFFF', + borderRadius: 12, + padding: 12, + borderWidth: 1, + borderColor: '#E0E0E0', + overflow: 'hidden', + }, + shimmerOverlay: { + position: 'absolute', + top: 0, + left: 0, + right: 0, + bottom: 0, + backgroundColor: 'rgba(255, 255, 255, 0.5)', + }, + skeletonContent: { + gap: 8, + }, + skeletonLine: { + height: 12, + borderRadius: 6, + backgroundColor: '#E0E0E0', + }, + skeletonTitle: { + width: '70%', + height: 16, + }, + skeletonSubtitle: { + width: '50%', + }, + skeletonMeta: { + flexDirection: 'row', + gap: 8, + marginTop: 4, + }, + skeletonBadge: { + width: 60, + height: 20, + borderRadius: 10, + backgroundColor: '#E0E0E0', + }, +}); + +export default LoadingSkeletonCard; diff --git a/src/components/Calendar20/AgendaView.tsx b/src/components/Calendar20/AgendaView.tsx new file mode 100644 index 0000000..d5c69b8 --- /dev/null +++ 
b/src/components/Calendar20/AgendaView.tsx @@ -0,0 +1,297 @@ +import React, { useMemo, useState, useCallback } from 'react'; +import { + View, + Text, + SectionList, + TouchableOpacity, + StyleSheet, +} from 'react-native'; +import dayjs from 'dayjs'; +import { Ionicons } from '@expo/vector-icons'; +import { CalendarTask } from './types'; +import { useTranslation } from 'react-i18next'; + +const INITIAL_DAYS = 30; +const LOAD_MORE_DAYS = 30; + +interface AgendaViewProps { + currentDate: dayjs.Dayjs; + tasks: CalendarTask[]; + onDatePress: (date: dayjs.Dayjs) => void; + onTaskPress: (task: CalendarTask) => void; + onToggleComplete: (task: CalendarTask) => void; + onSwipeLeft: () => void; + onSwipeRight: () => void; +} + +interface AgendaSection { + title: string; + date: dayjs.Dayjs; + isToday: boolean; + data: CalendarTask[]; +} + +const AgendaView: React.FC = ({ + currentDate, + tasks, + onDatePress, + onTaskPress, + onToggleComplete, +}) => { + const { t } = useTranslation(); + const [daysToShow, setDaysToShow] = useState(INITIAL_DAYS); + + const sections = useMemo((): AgendaSection[] => { + const today = dayjs(); + const result: AgendaSection[] = []; + + for (let i = 0; i < daysToShow; i++) { + const day = currentDate.add(i, 'day'); + const dayStr = day.format('YYYY-MM-DD'); + const dayTasks = tasks.filter(task => { + return ( + day.isSame(task.startDayjs, 'day') || + day.isSame(task.endDayjs, 'day') || + (day.isAfter(task.startDayjs, 'day') && day.isBefore(task.endDayjs, 'day')) + ); + }); + + // Sort by time + const sorted = [...dayTasks].sort((a, b) => { + if (a.isAllDay && !b.isAllDay) return -1; + if (!a.isAllDay && b.isAllDay) return 1; + return a.startDayjs.valueOf() - b.startDayjs.valueOf(); + }); + + const isToday = day.isSame(today, 'day'); + const title = isToday + ? `${t('calendar20.today')} - ${day.format('ddd, D MMM')}` + : day.format('ddd, D MMMM YYYY'); + + result.push({ + title, + date: day, + isToday, + data: sorted.length > 0 ? 
sorted : [{ _empty: true } as any], + }); + } + + return result; + }, [currentDate, tasks, daysToShow, t]); + + const loadMore = useCallback(() => { + setDaysToShow(prev => prev + LOAD_MORE_DAYS); + }, []); + + const renderItem = useCallback(({ item, section }: { item: CalendarTask; section: AgendaSection }) => { + if ((item as any)._empty) { + return ( + onDatePress(section.date.hour(12).minute(0).second(0))} + > + {t('calendar20.noEvents')} + + + ); + } + + const isCompleted = item.status?.toLowerCase() === 'completato' || item.status?.toLowerCase() === 'completed'; + const timeStr = item.isAllDay + ? t('calendar20.allDay') + : `${item.startDayjs.format('HH:mm')} - ${item.endDayjs.format('HH:mm')}`; + + return ( + onTaskPress(item)} + > + onToggleComplete(item)} + hitSlop={{ top: 8, bottom: 8, left: 8, right: 8 }} + style={styles.checkbox} + > + + + + + + + + {item.title} + + {timeStr} + + + {item.category_name && ( + + {item.category_name} + + )} + + ); + }, [onTaskPress, onToggleComplete, t]); + + const renderSectionHeader = useCallback(({ section }: { section: AgendaSection }) => ( + + + {section.title} + + onDatePress(section.date.hour(12).minute(0).second(0))} + hitSlop={{ top: 8, bottom: 8, left: 8, right: 8 }} + > + + + + ), [onDatePress]); + + return ( + item.task_id || item.id || `empty-${index}`} + stickySectionHeadersEnabled={false} + style={styles.container} + contentContainerStyle={styles.contentContainer} + onEndReached={loadMore} + onEndReachedThreshold={0.5} + ListFooterComponent={ + + + Load more + + } + /> + ); +}; + +const styles = StyleSheet.create({ + container: { + flex: 1, + }, + contentContainer: { + paddingBottom: 80, + paddingHorizontal: 16, + }, + sectionHeader: { + flexDirection: 'row', + alignItems: 'center', + justifyContent: 'space-between', + paddingHorizontal: 4, + paddingVertical: 14, + backgroundColor: 'transparent', + }, + todayHeader: { + backgroundColor: 'transparent', + }, + sectionTitle: { + flex: 1, + fontSize: 17, + 
fontWeight: '300', + color: '#000000', + fontFamily: 'System', + letterSpacing: -0.3, + }, + todayTitle: { + color: '#000000', + fontWeight: '500', + }, + addButton: { + padding: 4, + }, + taskRow: { + flexDirection: 'row', + alignItems: 'center', + paddingHorizontal: 16, + paddingVertical: 16, + marginBottom: 8, + backgroundColor: '#ffffff', + borderRadius: 16, + borderWidth: 1, + borderColor: '#e1e5e9', + shadowColor: '#000', + shadowOffset: { width: 0, height: 2 }, + shadowOpacity: 0.04, + shadowRadius: 8, + elevation: 1, + }, + checkbox: { + marginRight: 12, + }, + colorDot: { + width: 10, + height: 10, + borderRadius: 5, + marginRight: 14, + }, + taskContent: { + flex: 1, + }, + taskTitle: { + fontSize: 17, + fontWeight: '400', + color: '#000000', + fontFamily: 'System', + letterSpacing: -0.3, + }, + completedTitle: { + textDecorationLine: 'line-through', + color: '#999999', + }, + taskTime: { + fontSize: 14, + color: '#666666', + fontFamily: 'System', + marginTop: 4, + }, + categoryBadge: { + fontSize: 13, + fontWeight: '400', + fontFamily: 'System', + maxWidth: 90, + }, + emptyDay: { + paddingHorizontal: 16, + paddingVertical: 20, + marginBottom: 8, + backgroundColor: '#ffffff', + borderRadius: 16, + borderWidth: 1, + borderColor: '#f0f0f0', + alignItems: 'center', + justifyContent: 'center', + }, + emptyText: { + fontSize: 15, + color: '#cccccc', + fontFamily: 'System', + fontStyle: 'italic', + }, + loadMoreButton: { + flexDirection: 'row', + alignItems: 'center', + justifyContent: 'center', + paddingVertical: 24, + gap: 8, + }, + loadMoreText: { + fontSize: 16, + color: '#000000', + fontWeight: '400', + fontFamily: 'System', + }, +}); + +export default React.memo(AgendaView); diff --git a/src/components/Calendar20/Calendar20Modal.tsx b/src/components/Calendar20/Calendar20Modal.tsx new file mode 100644 index 0000000..4ee8507 --- /dev/null +++ b/src/components/Calendar20/Calendar20Modal.tsx @@ -0,0 +1,32 @@ +import React from 'react'; +import { Modal, 
StyleSheet, View } from 'react-native'; +import Calendar20View from './Calendar20View'; + +interface Calendar20ModalProps { + visible: boolean; + onClose: () => void; +} + +const Calendar20Modal: React.FC = ({ visible, onClose }) => { + return ( + + + + + + ); +}; + +const styles = StyleSheet.create({ + container: { + flex: 1, + backgroundColor: '#ffffff', + }, +}); + +export default Calendar20Modal; diff --git a/src/components/Calendar20/Calendar20View.tsx b/src/components/Calendar20/Calendar20View.tsx new file mode 100644 index 0000000..921ca08 --- /dev/null +++ b/src/components/Calendar20/Calendar20View.tsx @@ -0,0 +1,427 @@ +import React, { useState, useEffect, useCallback, useRef, useMemo } from 'react'; +import { View, StyleSheet, ActivityIndicator } from 'react-native'; +import dayjs from 'dayjs'; +import isoWeek from 'dayjs/plugin/isoWeek'; +import { Task, getAllTasks, getCategories, completeTask, disCompleteTask } from '../../services/taskService'; +import { TaskCacheService } from '../../services/TaskCacheService'; +import AppInitializer from '../../services/AppInitializer'; +import eventEmitter, { EVENTS } from '../../utils/eventEmitter'; +import { useFocusEffect } from '@react-navigation/native'; +import CategoryColorService from './categoryColors'; +import { CalendarViewType, CalendarTask } from './types'; +import TopBar from './TopBar'; +import MonthView from './MonthView'; +import WeekView from './WeekView'; +import ThreeDayView from './ThreeDayView'; +import DayView from './DayView'; +import AgendaView from './AgendaView'; +import MiniCalendar from './MiniCalendar'; +import ViewSelector from './ViewSelector'; +import SearchOverlay from './SearchOverlay'; +import FABMenu from './FABMenu'; +import AddTask from '../Task/AddTask'; +import AsyncStorage from '@react-native-async-storage/async-storage'; + +dayjs.extend(isoWeek); + +const VIEW_PREF_KEY = '@calendar20_view_pref'; + +interface Calendar20ViewProps { + onClose?: () => void; +} + +const 
Calendar20View: React.FC = ({ onClose }) => { + const [viewType, setViewType] = useState('month'); + const [currentDate, setCurrentDate] = useState(dayjs()); + const [rawTasks, setRawTasks] = useState([]); + const [categories, setCategories] = useState([]); + const [enabledCategories, setEnabledCategories] = useState>(new Set()); + const [isLoading, setIsLoading] = useState(true); + const [viewSelectorVisible, setViewSelectorVisible] = useState(false); + const [searchVisible, setSearchVisible] = useState(false); + const [miniCalendarVisible, setMiniCalendarVisible] = useState(false); + const [addTaskVisible, setAddTaskVisible] = useState(false); + const [selectedDateForTask, setSelectedDateForTask] = useState(null); + + const cacheService = useRef(TaskCacheService.getInstance()).current; + const appInitializer = useRef(AppInitializer.getInstance()).current; + const colorService = useRef(CategoryColorService.getInstance()).current; + + // Load saved view preference + useEffect(() => { + (async () => { + try { + const saved = await AsyncStorage.getItem(VIEW_PREF_KEY); + if (saved) setViewType(saved as CalendarViewType); + } catch {} + })(); + }, []); + + // Enrich tasks with colors and dayjs + const calendarTasks = useMemo((): CalendarTask[] => { + return rawTasks.map(task => { + const categoryName = task.category_name || ''; + const displayColor = colorService.getColor(categoryName); + const startDayjs = task.start_time ? dayjs(task.start_time) : dayjs(); + const endDayjs = task.end_time ? 
dayjs(task.end_time) : startDayjs; + const durationMinutes = endDayjs.diff(startDayjs, 'minute'); + const isMultiDay = !startDayjs.isSame(endDayjs, 'day'); + const isAllDay = durationMinutes >= 1440 || (!task.start_time && !!task.end_time); + + return { + ...task, + displayColor, + startDayjs, + endDayjs, + durationMinutes: Math.max(durationMinutes, 30), // min 30 min for display + isMultiDay, + isAllDay, + }; + }); + }, [rawTasks, colorService]); + + // Filtered tasks by enabled categories + const filteredTasks = useMemo(() => { + if (enabledCategories.size === 0) return calendarTasks; + return calendarTasks.filter(t => { + const cat = (t.category_name || '').toLowerCase().trim(); + return enabledCategories.has(cat); + }); + }, [calendarTasks, enabledCategories]); + + // Fetch tasks + const fetchTasks = useCallback(async () => { + try { + setIsLoading(true); + + // Load color service + await colorService.load(); + + // Try AppInitializer cache first + if (appInitializer.isDataReady()) { + const cachedTasks = await cacheService.getCachedTasks(); + if (cachedTasks.length > 0) { + setRawTasks(cachedTasks); + setIsLoading(false); + + // Load categories + const cats = await cacheService.getCachedCategories(); + setCategories(cats); + colorService.assignColors(cats.map((c: any) => c.name)); + return; + } + } + + // Wait for data + const dataReady = await appInitializer.waitForDataLoad(3000); + if (dataReady) { + const cachedTasks = await cacheService.getCachedTasks(); + if (cachedTasks.length > 0) { + setRawTasks(cachedTasks); + setIsLoading(false); + const cats = await cacheService.getCachedCategories(); + setCategories(cats); + colorService.assignColors(cats.map((c: any) => c.name)); + return; + } + } + + // Fallback to API + const [tasksData, catsData] = await Promise.all([ + getAllTasks(true), + getCategories(true), + ]); + if (Array.isArray(tasksData)) setRawTasks(tasksData); + if (Array.isArray(catsData)) { + setCategories(catsData); + 
colorService.assignColors(catsData.map((c: any) => c.name)); + } + } catch (error) { + console.error('[CALENDAR20] Error loading tasks:', error); + const cachedTasks = await cacheService.getCachedTasks(); + if (cachedTasks.length > 0) setRawTasks(cachedTasks); + } finally { + setIsLoading(false); + } + }, [cacheService, appInitializer, colorService]); + + // Initial load + useEffect(() => { + fetchTasks(); + }, [fetchTasks]); + + // Refresh on focus + useFocusEffect( + useCallback(() => { + fetchTasks(); + }, [fetchTasks]) + ); + + // Event emitter subscriptions + useEffect(() => { + const handleTaskAdded = (newTask: Task) => { + setRawTasks(prev => { + if (prev.some(t => (t.id === newTask.id) || (t.task_id === newTask.task_id))) return prev; + return [...prev, newTask]; + }); + }; + + const handleTaskUpdated = (updatedTask: Task) => { + setRawTasks(prev => + prev.map(t => { + const isMatch = + (t.id === updatedTask.id) || + (t.task_id === updatedTask.task_id) || + (updatedTask.id && t.task_id === updatedTask.id) || + (updatedTask.task_id && t.id === updatedTask.task_id); + return isMatch ? 
{ ...t, ...updatedTask } : t; + }) + ); + }; + + const handleTaskDeleted = (taskId: string | number) => { + setRawTasks(prev => prev.filter(t => t.id !== taskId && t.task_id !== taskId)); + }; + + const handleTasksSynced = ({ tasks }: { tasks: Task[] }) => { + if (Array.isArray(tasks)) setRawTasks(tasks); + }; + + eventEmitter.on(EVENTS.TASK_ADDED, handleTaskAdded); + eventEmitter.on(EVENTS.TASK_UPDATED, handleTaskUpdated); + eventEmitter.on(EVENTS.TASK_DELETED, handleTaskDeleted); + eventEmitter.on(EVENTS.TASKS_SYNCED, handleTasksSynced); + + return () => { + eventEmitter.off(EVENTS.TASK_ADDED, handleTaskAdded); + eventEmitter.off(EVENTS.TASK_UPDATED, handleTaskUpdated); + eventEmitter.off(EVENTS.TASK_DELETED, handleTaskDeleted); + eventEmitter.off(EVENTS.TASKS_SYNCED, handleTasksSynced); + }; + }, []); + + // Navigation + const navigateDate = useCallback((direction: 'prev' | 'next') => { + setCurrentDate(prev => { + switch (viewType) { + case 'month': + return direction === 'next' ? prev.add(1, 'month') : prev.subtract(1, 'month'); + case 'week': + return direction === 'next' ? prev.add(1, 'week') : prev.subtract(1, 'week'); + case '3day': + return direction === 'next' ? prev.add(1, 'day') : prev.subtract(1, 'day'); + case 'day': + return direction === 'next' ? prev.add(1, 'day') : prev.subtract(1, 'day'); + case 'agenda': + return direction === 'next' ? 
prev.add(1, 'month') : prev.subtract(1, 'month'); + default: + return prev; + } + }); + }, [viewType]); + + const handleViewChange = useCallback(async (newView: CalendarViewType) => { + setViewType(newView); + setViewSelectorVisible(false); + try { + await AsyncStorage.setItem(VIEW_PREF_KEY, newView); + } catch {} + }, []); + + const handleDatePress = useCallback((date: dayjs.Dayjs) => { + // Apri il modal per creare un task con la data selezionata + setSelectedDateForTask(date); + setAddTaskVisible(true); + }, []); + + const handleTaskPress = useCallback((task: CalendarTask) => { + // Navigate to day view for the task + setCurrentDate(task.startDayjs); + if (viewType === 'month') { + setViewType('day'); + } + }, [viewType]); + + const handleToggleComplete = useCallback(async (task: CalendarTask) => { + const taskId = task.task_id || task.id; + const isCompleted = task.status?.toLowerCase() === 'completato' || task.status?.toLowerCase() === 'completed'; + try { + if (isCompleted) { + await disCompleteTask(taskId); + } else { + await completeTask(taskId); + } + } catch (error) { + console.error('[CALENDAR20] Error toggling task completion:', error); + } + }, []); + + const handleSaveTask = useCallback(async ( + title: string, + description: string, + dueDate: string, + priority: number, + categoryNameParam?: string + ) => { + const { addTask } = await import('../../services/taskService'); + const priorityString = priority === 1 ? 'Bassa' : priority === 2 ? 
'Media' : 'Alta'; + const category = categoryNameParam || 'Calendario'; + // Usa la data selezionata se disponibile, altrimenti usa la data corrente + const taskDate = selectedDateForTask || currentDate; + const newTask: Task = { + title: title.trim(), + description: description || '', + start_time: taskDate.toISOString(), + end_time: new Date(dueDate).toISOString(), + priority: priorityString, + status: 'In sospeso', + category_name: category, + }; + try { + await addTask(newTask); + } catch (error) { + console.error('[CALENDAR20] Error adding task:', error); + } + setAddTaskVisible(false); + setSelectedDateForTask(null); + }, [currentDate, selectedDateForTask]); + + const handleCategoryToggle = useCallback((categoryName: string) => { + setEnabledCategories(prev => { + const next = new Set(prev); + const key = categoryName.toLowerCase().trim(); + if (next.has(key)) { + next.delete(key); + } else { + next.add(key); + } + // If all categories are deselected, show all + if (next.size === categories.length) { + return new Set(); + } + return next; + }); + }, [categories.length]); + + const handleShowAll = useCallback(() => { + setEnabledCategories(new Set()); + }, []); + + const renderView = () => { + const commonProps = { + currentDate, + tasks: filteredTasks, + onDatePress: handleDatePress, + onTaskPress: handleTaskPress, + onToggleComplete: handleToggleComplete, + onSwipeLeft: () => navigateDate('next'), + onSwipeRight: () => navigateDate('prev'), + }; + + switch (viewType) { + case 'month': + return ; + case 'week': + return ; + case '3day': + return ; + case 'day': + return ; + case 'agenda': + return ; + default: + return ; + } + }; + + if (isLoading) { + return ( + + + + ); + } + + return ( + + setViewSelectorVisible(true)} + onSearchPress={() => setSearchVisible(true)} + onTodayPress={() => setCurrentDate(dayjs())} + onTitlePress={() => setMiniCalendarVisible(true)} + onClose={onClose} + /> + + {renderView()} + + { + setSelectedDateForTask(null); + 
setAddTaskVisible(true); + }} + /> + + setViewSelectorVisible(false)} + /> + + { + setSearchVisible(false); + setCurrentDate(task.startDayjs); + setViewType('day'); + }} + onClose={() => setSearchVisible(false)} + /> + + { + setCurrentDate(date); + setMiniCalendarVisible(false); + }} + onClose={() => setMiniCalendarVisible(false)} + /> + + { + setAddTaskVisible(false); + setSelectedDateForTask(null); + }} + onSave={handleSaveTask} + allowCategorySelection={true} + categoryName="Calendario" + initialDate={(selectedDateForTask || currentDate).format('YYYY-MM-DD')} + /> + + ); +}; + +const styles = StyleSheet.create({ + container: { + flex: 1, + backgroundColor: '#ffffff', + }, + loadingContainer: { + flex: 1, + justifyContent: 'center', + alignItems: 'center', + backgroundColor: '#ffffff', + }, +}); + +export default Calendar20View; diff --git a/src/components/Calendar20/DayView.tsx b/src/components/Calendar20/DayView.tsx new file mode 100644 index 0000000..c335559 --- /dev/null +++ b/src/components/Calendar20/DayView.tsx @@ -0,0 +1,310 @@ +import React, { useMemo, useRef, useEffect } from 'react'; +import { + View, + Text, + ScrollView, + StyleSheet, + Dimensions, + PanResponder, + TouchableOpacity, +} from 'react-native'; +import dayjs from 'dayjs'; +import { CalendarTask, OverlapColumn } from './types'; +import TimeBlock from './TimeBlock'; +import EventChip from './EventChip'; +import { useTranslation } from 'react-i18next'; + +const HOUR_HEIGHT = 72; +const TIME_LABEL_WIDTH = 52; +const { width: SCREEN_WIDTH } = Dimensions.get('window'); +const COLUMN_WIDTH = SCREEN_WIDTH - TIME_LABEL_WIDTH - 16; + +interface DayViewProps { + currentDate: dayjs.Dayjs; + tasks: CalendarTask[]; + onDatePress: (date: dayjs.Dayjs) => void; + onTaskPress: (task: CalendarTask) => void; + onToggleComplete: (task: CalendarTask) => void; + onSwipeLeft: () => void; + onSwipeRight: () => void; +} + +function computeOverlapColumns(tasks: CalendarTask[]): OverlapColumn[] { + if (tasks.length 
=== 0) return []; + + const sorted = [...tasks].sort((a, b) => { + const diff = a.startDayjs.valueOf() - b.startDayjs.valueOf(); + if (diff !== 0) return diff; + return b.durationMinutes - a.durationMinutes; + }); + + const columns: OverlapColumn[] = []; + const endTimes: number[] = []; // tracks end time per column + + for (const task of sorted) { + const start = task.startDayjs.valueOf(); + let placed = false; + + for (let col = 0; col < endTimes.length; col++) { + if (start >= endTimes[col]) { + endTimes[col] = task.endDayjs.valueOf(); + columns.push({ task, column: col, totalColumns: 0 }); + placed = true; + break; + } + } + + if (!placed) { + endTimes.push(task.endDayjs.valueOf()); + columns.push({ task, column: endTimes.length - 1, totalColumns: 0 }); + } + } + + // Compute totalColumns for each group of overlapping events + // Simple approach: set totalColumns to max column + 1 among overlapping peers + for (let i = 0; i < columns.length; i++) { + const entry = columns[i]; + const taskStart = entry.task.startDayjs.valueOf(); + const taskEnd = entry.task.endDayjs.valueOf(); + + let maxCol = entry.column; + for (let j = 0; j < columns.length; j++) { + if (i === j) continue; + const other = columns[j]; + const otherStart = other.task.startDayjs.valueOf(); + const otherEnd = other.task.endDayjs.valueOf(); + // Overlaps? 
+ if (otherStart < taskEnd && otherEnd > taskStart) { + maxCol = Math.max(maxCol, other.column); + } + } + entry.totalColumns = maxCol + 1; + } + + return columns; +} + +const DayView: React.FC = ({ + currentDate, + tasks, + onDatePress, + onTaskPress, + onToggleComplete, + onSwipeLeft, + onSwipeRight, +}) => { + const { t } = useTranslation(); + const scrollRef = useRef(null); + + // Scroll to current time on mount + useEffect(() => { + const now = dayjs(); + if (currentDate.isSame(now, 'day')) { + const offset = Math.max(0, (now.hour() - 1) * HOUR_HEIGHT); + setTimeout(() => scrollRef.current?.scrollTo({ y: offset, animated: false }), 100); + } else { + setTimeout(() => scrollRef.current?.scrollTo({ y: 7 * HOUR_HEIGHT, animated: false }), 100); + } + }, [currentDate]); + + const swipeRef = useRef({ swiped: false }); + const panResponder = useRef( + PanResponder.create({ + onMoveShouldSetPanResponder: (_, gs) => Math.abs(gs.dx) > 20 && Math.abs(gs.dy) < 20, + onPanResponderGrant: () => { + swipeRef.current.swiped = false; + }, + onPanResponderRelease: (_, gs) => { + if (swipeRef.current.swiped) return; + if (gs.dx > 60) { + swipeRef.current.swiped = true; + onSwipeRight(); + } else if (gs.dx < -60) { + swipeRef.current.swiped = true; + onSwipeLeft(); + } + }, + }) + ).current; + + const dayTasks = useMemo(() => { + return tasks.filter(task => { + return ( + currentDate.isSame(task.startDayjs, 'day') || + currentDate.isSame(task.endDayjs, 'day') || + (currentDate.isAfter(task.startDayjs, 'day') && currentDate.isBefore(task.endDayjs, 'day')) + ); + }); + }, [tasks, currentDate]); + + const allDayTasks = useMemo(() => dayTasks.filter(t => t.isAllDay), [dayTasks]); + const timedTasks = useMemo(() => dayTasks.filter(t => !t.isAllDay), [dayTasks]); + const overlapColumns = useMemo(() => computeOverlapColumns(timedTasks), [timedTasks]); + + const now = dayjs(); + const isToday = currentDate.isSame(now, 'day'); + const currentTimeTop = isToday ? 
(now.hour() + now.minute() / 60) * HOUR_HEIGHT : -1; + + const hours = Array.from({ length: 24 }, (_, i) => i); + + return ( + + {/* All-day events */} + {allDayTasks.length > 0 && ( + + {t('calendar20.allDay')} + + {allDayTasks.map(task => ( + + ))} + + + )} + + + + {/* Time labels */} + + {hours.map(hour => ( + + + {hour.toString().padStart(2, '0')}:00 + + + ))} + + + {/* Events column */} + + {/* Clickable time slots (every 30 minutes) */} + {hours.map(hour => [0, 30].map(minute => { + const slotTime = currentDate.hour(hour).minute(minute).second(0); + return ( + onDatePress(slotTime)} + /> + ); + }))} + + {/* Hour grid lines */} + {hours.map(hour => ( + + ))} + + {/* Time blocks */} + {overlapColumns.map(({ task, column, totalColumns }) => ( + + ))} + + {/* Current time indicator */} + {isToday && currentTimeTop >= 0 && ( + + + + + )} + + + + + ); +}; + +const styles = StyleSheet.create({ + container: { + flex: 1, + }, + allDayContainer: { + flexDirection: 'row', + alignItems: 'center', + paddingHorizontal: 16, + paddingVertical: 8, + borderBottomWidth: StyleSheet.hairlineWidth, + borderBottomColor: '#e1e5e9', + backgroundColor: '#fafafa', + }, + allDayLabel: { + width: TIME_LABEL_WIDTH - 12, + fontSize: 12, + fontWeight: '400', + color: '#666666', + fontFamily: 'System', + }, + allDayChips: { + flex: 1, + flexDirection: 'row', + flexWrap: 'wrap', + gap: 6, + }, + scrollContainer: { + flex: 1, + }, + gridContainer: { + flexDirection: 'row', + paddingHorizontal: 8, + }, + timeColumn: { + width: TIME_LABEL_WIDTH, + }, + timeLabelSlot: { + justifyContent: 'flex-start', + }, + clickableSlot: { + position: 'absolute', + left: 0, + right: 0, + backgroundColor: 'transparent', + }, + timeLabel: { + fontSize: 12, + color: '#999999', + fontFamily: 'System', + fontWeight: '400', + marginTop: -7, + }, + eventsColumn: { + flex: 1, + height: 24 * HOUR_HEIGHT, + position: 'relative', + }, + hourLine: { + position: 'absolute', + left: 0, + right: 0, + height: 
StyleSheet.hairlineWidth, + backgroundColor: '#e1e5e9', + }, + currentTimeLine: { + position: 'absolute', + left: -6, + right: 0, + flexDirection: 'row', + alignItems: 'center', + zIndex: 10, + }, + currentTimeDot: { + width: 8, + height: 8, + borderRadius: 4, + backgroundColor: '#000000', + }, + currentTimeBar: { + flex: 1, + height: 1.5, + backgroundColor: '#000000', + }, +}); + +export default React.memo(DayView); diff --git a/src/components/Calendar20/EventChip.tsx b/src/components/Calendar20/EventChip.tsx new file mode 100644 index 0000000..9e994f8 --- /dev/null +++ b/src/components/Calendar20/EventChip.tsx @@ -0,0 +1,69 @@ +import React from 'react'; +import { View, Text, StyleSheet, TouchableOpacity } from 'react-native'; +import { CalendarTask } from './types'; + +interface EventChipProps { + task: CalendarTask; + onPress?: (task: CalendarTask) => void; + isSpanning?: boolean; + isStart?: boolean; + isEnd?: boolean; +} + +const EventChip: React.FC = ({ task, onPress, isSpanning, isStart = true, isEnd = true }) => { + const bgColor = task.displayColor || '#007AFF'; + const isCompleted = task.status?.toLowerCase() === 'completato' || task.status?.toLowerCase() === 'completed'; + + return ( + onPress?.(task)} + style={[ + styles.chip, + { backgroundColor: bgColor }, + isSpanning && !isStart && styles.spanningLeft, + isSpanning && !isEnd && styles.spanningRight, + isCompleted && styles.completed, + ]} + > + + {task.title} + + + ); +}; + +const styles = StyleSheet.create({ + chip: { + paddingHorizontal: 6, + paddingVertical: 3, + borderRadius: 6, + marginBottom: 2, + minHeight: 20, + justifyContent: 'center', + }, + spanningLeft: { + borderTopLeftRadius: 0, + borderBottomLeftRadius: 0, + marginLeft: -1, + }, + spanningRight: { + borderTopRightRadius: 0, + borderBottomRightRadius: 0, + marginRight: -1, + }, + chipText: { + color: '#ffffff', + fontSize: 12, + fontWeight: '400', + fontFamily: 'System', + }, + completed: { + opacity: 0.5, + }, + completedText: { + 
textDecorationLine: 'line-through', + }, +}); + +export default React.memo(EventChip); diff --git a/src/components/Calendar20/FABMenu.tsx b/src/components/Calendar20/FABMenu.tsx new file mode 100644 index 0000000..263f787 --- /dev/null +++ b/src/components/Calendar20/FABMenu.tsx @@ -0,0 +1,44 @@ +import React from 'react'; +import { + TouchableOpacity, + StyleSheet, +} from 'react-native'; +import { Ionicons } from '@expo/vector-icons'; + +interface FABMenuProps { + onNewTask: () => void; +} + +const FABMenu: React.FC = ({ onNewTask }) => { + return ( + + + + ); +}; + +const styles = StyleSheet.create({ + fab: { + position: 'absolute', + bottom: 24, + right: 20, + width: 60, + height: 60, + borderRadius: 30, + backgroundColor: '#000000', + alignItems: 'center', + justifyContent: 'center', + shadowColor: '#000', + shadowOffset: { width: 0, height: 4 }, + shadowOpacity: 0.08, + shadowRadius: 12, + elevation: 3, + zIndex: 100, + }, +}); + +export default React.memo(FABMenu); diff --git a/src/components/Calendar20/MiniCalendar.tsx b/src/components/Calendar20/MiniCalendar.tsx new file mode 100644 index 0000000..7158cfc --- /dev/null +++ b/src/components/Calendar20/MiniCalendar.tsx @@ -0,0 +1,213 @@ +import React, { useState, useMemo } from 'react'; +import { + View, + Text, + TouchableOpacity, + Modal, + StyleSheet, + Pressable, +} from 'react-native'; +import dayjs from 'dayjs'; +import isoWeek from 'dayjs/plugin/isoWeek'; +import { Ionicons } from '@expo/vector-icons'; +import { useTranslation } from 'react-i18next'; + +dayjs.extend(isoWeek); + +interface MiniCalendarProps { + visible: boolean; + currentDate: dayjs.Dayjs; + onDateSelect: (date: dayjs.Dayjs) => void; + onClose: () => void; +} + +const MiniCalendar: React.FC = ({ + visible, + currentDate, + onDateSelect, + onClose, +}) => { + const { t } = useTranslation(); + const [displayMonth, setDisplayMonth] = useState(currentDate); + + const weeks = useMemo(() => { + const startOfMonth = 
displayMonth.startOf('month'); + const endOfMonth = displayMonth.endOf('month'); + const startDate = startOfMonth.startOf('isoWeek'); + const endDate = endOfMonth.endOf('isoWeek'); + const today = dayjs(); + + const result: { date: dayjs.Dayjs; isCurrentMonth: boolean; isToday: boolean; isSelected: boolean }[][] = []; + let current = startDate; + + while (current.isBefore(endDate) || current.isSame(endDate, 'day')) { + const week: typeof result[0] = []; + for (let i = 0; i < 7; i++) { + week.push({ + date: current, + isCurrentMonth: current.month() === displayMonth.month(), + isToday: current.isSame(today, 'day'), + isSelected: current.isSame(currentDate, 'day'), + }); + current = current.add(1, 'day'); + } + result.push(week); + } + return result; + }, [displayMonth, currentDate]); + + const dayHeaders = useMemo(() => { + const days = ['monday', 'tuesday', 'wednesday', 'thursday', 'friday', 'saturday', 'sunday']; + return days.map(d => t(`calendar.days.${d}`)); + }, [t]); + + if (!visible) return null; + + return ( + + + e.stopPropagation()}> + {/* Month navigation */} + + setDisplayMonth(p => p.subtract(1, 'month'))}> + + + {displayMonth.format('MMMM YYYY')} + setDisplayMonth(p => p.add(1, 'month'))}> + + + + + {/* Day headers */} + + {dayHeaders.map((d, i) => ( + {d.toUpperCase()} + ))} + + + {/* Weeks */} + {weeks.map((week, wi) => ( + + {week.map((day, di) => ( + onDateSelect(day.date)} + style={[ + styles.dayCell, + day.isSelected && styles.selectedCell, + day.isToday && !day.isSelected && styles.todayCell, + ]} + > + + {day.date.date()} + + + ))} + + ))} + + + + ); +}; + +const styles = StyleSheet.create({ + backdrop: { + flex: 1, + backgroundColor: 'rgba(0,0,0,0.4)', + justifyContent: 'flex-start', + paddingTop: 80, + alignItems: 'center', + }, + container: { + backgroundColor: '#ffffff', + borderRadius: 24, + padding: 20, + width: 320, + shadowColor: '#000', + shadowOffset: { width: 0, height: 4 }, + shadowOpacity: 0.08, + shadowRadius: 12, + elevation: 3, 
+ }, + monthNav: { + flexDirection: 'row', + justifyContent: 'space-between', + alignItems: 'center', + marginBottom: 16, + }, + monthTitle: { + fontSize: 18, + fontWeight: '300', + color: '#000000', + fontFamily: 'System', + letterSpacing: -0.5, + }, + headerRow: { + flexDirection: 'row', + marginBottom: 8, + }, + headerText: { + flex: 1, + textAlign: 'center', + fontSize: 12, + fontWeight: '400', + color: '#999999', + fontFamily: 'System', + }, + weekRow: { + flexDirection: 'row', + }, + dayCell: { + flex: 1, + alignItems: 'center', + justifyContent: 'center', + paddingVertical: 8, + }, + selectedCell: { + backgroundColor: '#000000', + borderRadius: 14, + }, + todayCell: { + borderWidth: 1, + borderColor: '#000000', + borderRadius: 14, + }, + dayText: { + fontSize: 15, + fontWeight: '400', + color: '#000000', + fontFamily: 'System', + }, + otherMonthText: { + color: '#cccccc', + }, + selectedText: { + color: '#ffffff', + fontWeight: '600', + }, + todayText: { + color: '#000000', + fontWeight: '600', + }, + todayButton: { + marginTop: 16, + alignItems: 'center', + paddingVertical: 10, + }, + todayButtonText: { + fontSize: 15, + fontWeight: '400', + color: '#000000', + fontFamily: 'System', + }, +}); + +export default React.memo(MiniCalendar); diff --git a/src/components/Calendar20/MonthView.tsx b/src/components/Calendar20/MonthView.tsx new file mode 100644 index 0000000..aa5d017 --- /dev/null +++ b/src/components/Calendar20/MonthView.tsx @@ -0,0 +1,244 @@ +import React, { useMemo, useRef } from 'react'; +import { + View, + Text, + TouchableOpacity, + StyleSheet, + Dimensions, + PanResponder, +} from 'react-native'; +import dayjs from 'dayjs'; +import isoWeek from 'dayjs/plugin/isoWeek'; +import { CalendarTask, DayData } from './types'; +import EventChip from './EventChip'; +import { useTranslation } from 'react-i18next'; + +dayjs.extend(isoWeek); + +const { width: SCREEN_WIDTH } = Dimensions.get('window'); +const DAY_WIDTH = (SCREEN_WIDTH - 32) / 7; +const 
MAX_CHIPS = 2; + +interface MonthViewProps { + currentDate: dayjs.Dayjs; + tasks: CalendarTask[]; + onDatePress: (date: dayjs.Dayjs) => void; + onTaskPress: (task: CalendarTask) => void; + onSwipeLeft: () => void; + onSwipeRight: () => void; +} + +const MonthView: React.FC = ({ + currentDate, + tasks, + onDatePress, + onTaskPress, + onSwipeLeft, + onSwipeRight, +}) => { + const { t } = useTranslation(); + + const swipeRef = useRef({ x: 0, swiped: false }); + const panResponder = useRef( + PanResponder.create({ + onMoveShouldSetPanResponder: (_, gs) => Math.abs(gs.dx) > 15 && Math.abs(gs.dy) < 30, + onPanResponderGrant: (_, gs) => { + swipeRef.current = { x: gs.x0, swiped: false }; + }, + onPanResponderRelease: (_, gs) => { + if (swipeRef.current.swiped) return; + if (gs.dx > 60) { + swipeRef.current.swiped = true; + onSwipeRight(); + } else if (gs.dx < -60) { + swipeRef.current.swiped = true; + onSwipeLeft(); + } + }, + }) + ).current; + + const weeks = useMemo(() => { + const startOfMonth = currentDate.startOf('month'); + const endOfMonth = currentDate.endOf('month'); + // Start from Monday of the week containing the 1st + const startDate = startOfMonth.startOf('isoWeek'); + // End on Sunday of the week containing the last day + const endDate = endOfMonth.endOf('isoWeek'); + + const today = dayjs(); + const result: DayData[][] = []; + let current = startDate; + + while (current.isBefore(endDate) || current.isSame(endDate, 'day')) { + const week: DayData[] = []; + for (let i = 0; i < 7; i++) { + const dateStr = current.format('YYYY-MM-DD'); + const dayTasks = tasks.filter(task => { + if (!task.endDayjs && !task.startDayjs) return false; + const taskStart = task.startDayjs; + const taskEnd = task.endDayjs; + // Task falls on this day if the day is between start and end (inclusive) + return ( + current.isSame(taskEnd, 'day') || + current.isSame(taskStart, 'day') || + (current.isAfter(taskStart, 'day') && current.isBefore(taskEnd, 'day')) + ); + }); + + week.push({ + 
date: current, + dateString: dateStr, + isCurrentMonth: current.month() === currentDate.month(), + isToday: current.isSame(today, 'day'), + tasks: dayTasks, + }); + current = current.add(1, 'day'); + } + result.push(week); + } + return result; + }, [currentDate, tasks]); + + const dayHeaders = useMemo(() => { + const days = ['monday', 'tuesday', 'wednesday', 'thursday', 'friday', 'saturday', 'sunday']; + return days.map(d => t(`calendar.days.${d}`)); + }, [t]); + + return ( + + {/* Day headers */} + + {dayHeaders.map((day, i) => ( + + + {day.toUpperCase()} + + + ))} + + + {/* Week rows */} + {weeks.map((week, wi) => ( + + {week.map((day, di) => { + const extraCount = Math.max(0, day.tasks.length - MAX_CHIPS); + return ( + onDatePress(day.date)} + > + + + {day.date.date()} + + + + {day.tasks.slice(0, MAX_CHIPS).map(task => ( + + ))} + {extraCount > 0 && ( + + {`+${extraCount}`} + + )} + + + ); + })} + + ))} + + ); +}; + +const styles = StyleSheet.create({ + container: { + flex: 1, + paddingHorizontal: 16, + }, + headerRow: { + flexDirection: 'row', + paddingVertical: 14, + borderBottomWidth: StyleSheet.hairlineWidth, + borderBottomColor: '#e1e5e9', + }, + headerCell: { + width: DAY_WIDTH, + alignItems: 'center', + }, + headerText: { + fontSize: 13, + fontWeight: '400', + color: '#666666', + fontFamily: 'System', + }, + weekendHeader: { + color: '#999999', + }, + weekRow: { + flexDirection: 'row', + flex: 1, + borderBottomWidth: StyleSheet.hairlineWidth, + borderBottomColor: '#f0f0f0', + paddingVertical: 2, + }, + dayCell: { + width: DAY_WIDTH, + paddingTop: 6, + paddingHorizontal: 1, + }, + dateCircle: { + width: 32, + height: 32, + borderRadius: 16, + alignItems: 'center', + justifyContent: 'center', + alignSelf: 'center', + marginBottom: 4, + }, + todayCircle: { + backgroundColor: '#000000', + }, + dateText: { + fontSize: 15, + fontWeight: '400', + color: '#000000', + fontFamily: 'System', + }, + otherMonthText: { + color: '#cccccc', + }, + todayText: { + color: 
'#ffffff', + fontWeight: '600', + }, + weekendText: { + color: '#999999', + }, + chipsContainer: { + flex: 1, + }, + moreText: { + fontSize: 12, + color: '#666666', + fontFamily: 'System', + textAlign: 'center', + marginTop: 2, + }, +}); + +export default React.memo(MonthView); diff --git a/src/components/Calendar20/SearchOverlay.tsx b/src/components/Calendar20/SearchOverlay.tsx new file mode 100644 index 0000000..30e4d58 --- /dev/null +++ b/src/components/Calendar20/SearchOverlay.tsx @@ -0,0 +1,173 @@ +import React, { useState, useMemo } from 'react'; +import { + View, + Text, + TextInput, + FlatList, + TouchableOpacity, + Modal, + StyleSheet, + Pressable, + Keyboard, +} from 'react-native'; +import { Ionicons } from '@expo/vector-icons'; +import { CalendarTask } from './types'; +import { useTranslation } from 'react-i18next'; +import TaskCard from '../Task/TaskCard'; +import { Task } from '../../services/taskService'; + +interface SearchOverlayProps { + visible: boolean; + tasks: CalendarTask[]; + onTaskPress: (task: CalendarTask) => void; + onClose: () => void; +} + +const SearchOverlay: React.FC = ({ + visible, + tasks, + onTaskPress, + onClose, +}) => { + const { t } = useTranslation(); + const [query, setQuery] = useState(''); + + const results = useMemo(() => { + if (!query.trim()) return []; + const q = query.toLowerCase().trim(); + return tasks + .filter(task => { + const title = (task.title || '').toLowerCase(); + const desc = (task.description || '').toLowerCase(); + const category = (task.category_name || '').toLowerCase(); + return title.includes(q) || desc.includes(q) || category.includes(q); + }) + .sort((a, b) => a.startDayjs.valueOf() - b.startDayjs.valueOf()) + .slice(0, 50); + }, [query, tasks]); + + const handleClose = () => { + setQuery(''); + Keyboard.dismiss(); + onClose(); + }; + + if (!visible) return null; + + return ( + + + e.stopPropagation()}> + {/* Search bar */} + + + + {query.length > 0 && ( + setQuery('')}> + + + )} + + 
{t('common.buttons.cancel')} + + + + {/* Results */} + {query.trim().length > 0 && results.length === 0 && ( + + + {t('calendar20.search.noResults')} + + )} + + (item.task_id || item.id || i).toString()} + renderItem={({ item }) => ( + { + handleClose(); + onTaskPress(item); + }} + /> + )} + keyboardShouldPersistTaps="handled" + style={styles.resultsList} + contentContainerStyle={styles.resultsContent} + /> + + + + ); +}; + +const styles = StyleSheet.create({ + backdrop: { + flex: 1, + backgroundColor: 'rgba(0,0,0,0.4)', + }, + container: { + flex: 1, + backgroundColor: '#ffffff', + marginTop: 48, + borderTopLeftRadius: 24, + borderTopRightRadius: 24, + overflow: 'hidden', + }, + searchBar: { + flexDirection: 'row', + alignItems: 'center', + paddingHorizontal: 20, + paddingVertical: 14, + borderBottomWidth: StyleSheet.hairlineWidth, + borderBottomColor: '#e1e5e9', + backgroundColor: '#fafafa', + }, + input: { + flex: 1, + fontSize: 17, + fontFamily: 'System', + color: '#000000', + marginLeft: 8, + marginRight: 8, + paddingVertical: 4, + }, + cancelButton: { + marginLeft: 8, + }, + cancelText: { + fontSize: 15, + fontWeight: '400', + color: '#000000', + fontFamily: 'System', + }, + resultsList: { + flex: 1, + }, + resultsContent: { + paddingHorizontal: 16, + paddingTop: 8, + }, + noResults: { + alignItems: 'center', + justifyContent: 'center', + paddingTop: 60, + }, + noResultsText: { + fontSize: 16, + color: '#999999', + fontFamily: 'System', + marginTop: 12, + }, +}); + +export default React.memo(SearchOverlay); diff --git a/src/components/Calendar20/ThreeDayView.tsx b/src/components/Calendar20/ThreeDayView.tsx new file mode 100644 index 0000000..3055204 --- /dev/null +++ b/src/components/Calendar20/ThreeDayView.tsx @@ -0,0 +1,319 @@ +import React, { useMemo, useRef, useEffect } from 'react'; +import { + View, + Text, + ScrollView, + StyleSheet, + Dimensions, + PanResponder, + TouchableOpacity, +} from 'react-native'; +import dayjs from 'dayjs'; +import { 
CalendarTask, OverlapColumn } from './types'; +import TimeBlock from './TimeBlock'; +import EventChip from './EventChip'; +import { useTranslation } from 'react-i18next'; + +const HOUR_HEIGHT = 72; +const TIME_LABEL_WIDTH = 48; +const { width: SCREEN_WIDTH } = Dimensions.get('window'); +const COLUMN_WIDTH = (SCREEN_WIDTH - TIME_LABEL_WIDTH - 16) / 3; + +interface ThreeDayViewProps { + currentDate: dayjs.Dayjs; + tasks: CalendarTask[]; + onDatePress: (date: dayjs.Dayjs) => void; + onTaskPress: (task: CalendarTask) => void; + onToggleComplete: (task: CalendarTask) => void; + onSwipeLeft: () => void; + onSwipeRight: () => void; +} + +function computeOverlapColumns(tasks: CalendarTask[]): OverlapColumn[] { + if (tasks.length === 0) return []; + const sorted = [...tasks].sort((a, b) => { + const diff = a.startDayjs.valueOf() - b.startDayjs.valueOf(); + return diff !== 0 ? diff : b.durationMinutes - a.durationMinutes; + }); + const columns: OverlapColumn[] = []; + const endTimes: number[] = []; + for (const task of sorted) { + const start = task.startDayjs.valueOf(); + let placed = false; + for (let col = 0; col < endTimes.length; col++) { + if (start >= endTimes[col]) { + endTimes[col] = task.endDayjs.valueOf(); + columns.push({ task, column: col, totalColumns: 0 }); + placed = true; + break; + } + } + if (!placed) { + endTimes.push(task.endDayjs.valueOf()); + columns.push({ task, column: endTimes.length - 1, totalColumns: 0 }); + } + } + for (let i = 0; i < columns.length; i++) { + const e = columns[i]; + const s = e.task.startDayjs.valueOf(); + const en = e.task.endDayjs.valueOf(); + let maxCol = e.column; + for (let j = 0; j < columns.length; j++) { + if (i === j) continue; + const o = columns[j]; + if (o.task.startDayjs.valueOf() < en && o.task.endDayjs.valueOf() > s) { + maxCol = Math.max(maxCol, o.column); + } + } + e.totalColumns = maxCol + 1; + } + return columns; +} + +const ThreeDayView: React.FC = ({ + currentDate, + tasks, + onDatePress, + onTaskPress, + 
onToggleComplete, + onSwipeLeft, + onSwipeRight, +}) => { + const { t } = useTranslation(); + const scrollRef = useRef(null); + + const swipeRef = useRef({ swiped: false }); + const panResponder = useRef( + PanResponder.create({ + onMoveShouldSetPanResponder: (_, gs) => Math.abs(gs.dx) > 20 && Math.abs(gs.dy) < 20, + onPanResponderGrant: () => { swipeRef.current.swiped = false; }, + onPanResponderRelease: (_, gs) => { + if (swipeRef.current.swiped) return; + if (gs.dx > 60) { swipeRef.current.swiped = true; onSwipeRight(); } + else if (gs.dx < -60) { swipeRef.current.swiped = true; onSwipeLeft(); } + }, + }) + ).current; + + const days = useMemo(() => { + return Array.from({ length: 3 }, (_, i) => currentDate.add(i, 'day')); + }, [currentDate]); + + const allDayTasksByDay = useMemo(() => { + return days.map(day => + tasks.filter(task => { + if (!task.isAllDay) return false; + return ( + day.isSame(task.startDayjs, 'day') || + day.isSame(task.endDayjs, 'day') || + (day.isAfter(task.startDayjs, 'day') && day.isBefore(task.endDayjs, 'day')) + ); + }) + ); + }, [days, tasks]); + + const hasAllDay = allDayTasksByDay.some(d => d.length > 0); + + const timedTasksByDay = useMemo(() => { + return days.map(day => { + const dayTasks = tasks.filter(task => { + if (task.isAllDay) return false; + return ( + day.isSame(task.startDayjs, 'day') || + day.isSame(task.endDayjs, 'day') || + (day.isAfter(task.startDayjs, 'day') && day.isBefore(task.endDayjs, 'day')) + ); + }); + return computeOverlapColumns(dayTasks); + }); + }, [days, tasks]); + + useEffect(() => { + const now = dayjs(); + const offset = Math.max(0, (now.hour() - 1) * HOUR_HEIGHT); + setTimeout(() => scrollRef.current?.scrollTo({ y: offset, animated: false }), 100); + }, [currentDate]); + + const now = dayjs(); + const hours = Array.from({ length: 24 }, (_, i) => i); + + return ( + + {/* Day headers */} + + + {days.map((day, i) => { + const isToday = day.isSame(now, 'day'); + return ( + + + 
{day.format('ddd').toUpperCase()} + + + + {day.date()} + + + + ); + })} + + + {/* All-day strip */} + {hasAllDay && ( + + + {t('calendar20.allDay')} + + {days.map((_, i) => ( + + {allDayTasksByDay[i].map(task => ( + + ))} + + ))} + + )} + + {/* Time grid */} + + + + {hours.map(hour => ( + + {hour.toString().padStart(2, '0')}:00 + + ))} + + + {days.map((day, dayIndex) => { + const isToday = day.isSame(now, 'day'); + const currentTimeTop = isToday ? (now.hour() + now.minute() / 60) * HOUR_HEIGHT : -1; + + return ( + + {/* Clickable time slots (every 30 minutes) */} + {hours.map(hour => [0, 30].map(minute => { + const slotTime = day.hour(hour).minute(minute).second(0); + return ( + onDatePress(slotTime)} + /> + ); + }))} + + {hours.map(hour => ( + + ))} + + {timedTasksByDay[dayIndex].map(({ task, column, totalColumns }) => ( + + ))} + + {isToday && currentTimeTop >= 0 && ( + + + + + )} + + ); + })} + + + + ); +}; + +const styles = StyleSheet.create({ + container: { flex: 1 }, + headerRow: { + flexDirection: 'row', + paddingHorizontal: 8, + paddingVertical: 10, + borderBottomWidth: StyleSheet.hairlineWidth, + borderBottomColor: '#e1e5e9', + }, + dayHeader: { alignItems: 'center' }, + dayName: { + fontSize: 13, + fontWeight: '400', + color: '#666666', + fontFamily: 'System', + }, + todayColor: { color: '#000000' }, + dateCircle: { + width: 28, + height: 28, + borderRadius: 14, + alignItems: 'center', + justifyContent: 'center', + marginTop: 4, + }, + todayCircle: { backgroundColor: '#000000' }, + dateNum: { fontSize: 18, fontWeight: '400', color: '#000000', fontFamily: 'System' }, + todayDateNum: { color: '#ffffff', fontWeight: '600' }, + allDayRow: { + flexDirection: 'row', + paddingHorizontal: 8, + paddingVertical: 6, + borderBottomWidth: StyleSheet.hairlineWidth, + borderBottomColor: '#e1e5e9', + backgroundColor: '#fafafa', + }, + allDayLabelText: { fontSize: 12, color: '#666666', fontFamily: 'System' }, + allDayCell: { paddingHorizontal: 1 }, + scrollContainer: { 
flex: 1 }, + gridContainer: { flexDirection: 'row', paddingHorizontal: 8 }, + dayColumn: { + position: 'relative', + borderLeftWidth: StyleSheet.hairlineWidth, + borderLeftColor: '#f0f0f0', + }, + clickableSlot: { + position: 'absolute', + left: 0, + right: 0, + backgroundColor: 'transparent', + }, + hourLine: { + position: 'absolute', + left: 0, + right: 0, + height: StyleSheet.hairlineWidth, + backgroundColor: '#e1e5e9', + }, + timeLabel: { fontSize: 12, color: '#999999', fontFamily: 'System', marginTop: -5 }, + currentTimeLine: { + position: 'absolute', + left: -2, + right: 0, + flexDirection: 'row', + alignItems: 'center', + zIndex: 10, + }, + currentTimeDot: { width: 6, height: 6, borderRadius: 3, backgroundColor: '#000000' }, + currentTimeBar: { flex: 1, height: 1.5, backgroundColor: '#000000' }, +}); + +export default React.memo(ThreeDayView); diff --git a/src/components/Calendar20/TimeBlock.tsx b/src/components/Calendar20/TimeBlock.tsx new file mode 100644 index 0000000..ad77e5b --- /dev/null +++ b/src/components/Calendar20/TimeBlock.tsx @@ -0,0 +1,123 @@ +import React from 'react'; +import { View, Text, TouchableOpacity, StyleSheet } from 'react-native'; +import { Ionicons } from '@expo/vector-icons'; +import { CalendarTask } from './types'; + +interface TimeBlockProps { + task: CalendarTask; + hourHeight: number; + column: number; + totalColumns: number; + columnWidth: number; + onPress?: (task: CalendarTask) => void; + onToggleComplete?: (task: CalendarTask) => void; +} + +const TimeBlock: React.FC = ({ + task, + hourHeight, + column, + totalColumns, + columnWidth, + onPress, + onToggleComplete, +}) => { + const startHour = task.startDayjs.hour() + task.startDayjs.minute() / 60; + const height = Math.max((task.durationMinutes / 60) * hourHeight, 24); + const top = startHour * hourHeight; + const width = columnWidth / totalColumns - 2; + const left = column * (columnWidth / totalColumns) + 1; + + const isCompleted = task.status?.toLowerCase() === 
'completato' || task.status?.toLowerCase() === 'completed'; + const bgColor = task.displayColor || '#007AFF'; + + const startTime = task.startDayjs.format('HH:mm'); + const endTime = task.endDayjs.format('HH:mm'); + const showEndTime = height > 40; + + return ( + onPress?.(task)} + style={[ + styles.block, + { + top, + height, + left, + width, + backgroundColor: bgColor, + }, + isCompleted && styles.completed, + ]} + > + + + { + e.stopPropagation?.(); + onToggleComplete?.(task); + }} + hitSlop={{ top: 6, bottom: 6, left: 6, right: 6 }} + style={styles.checkboxArea} + > + + + + {task.title} + + + {showEndTime && ( + {startTime} - {endTime} + )} + + + ); +}; + +const styles = StyleSheet.create({ + block: { + position: 'absolute', + borderRadius: 10, + paddingHorizontal: 8, + paddingVertical: 4, + overflow: 'hidden', + borderLeftWidth: 3, + borderLeftColor: 'rgba(0,0,0,0.15)', + }, + completed: { + opacity: 0.5, + }, + content: { + flex: 1, + }, + header: { + flexDirection: 'row', + alignItems: 'center', + }, + checkboxArea: { + marginRight: 4, + }, + title: { + fontSize: 13, + fontWeight: '400', + color: '#ffffff', + fontFamily: 'System', + flex: 1, + }, + completedText: { + textDecorationLine: 'line-through', + }, + time: { + fontSize: 12, + color: 'rgba(255,255,255,0.8)', + fontFamily: 'System', + marginTop: 2, + }, +}); + +export default React.memo(TimeBlock); diff --git a/src/components/Calendar20/TopBar.tsx b/src/components/Calendar20/TopBar.tsx new file mode 100644 index 0000000..e909453 --- /dev/null +++ b/src/components/Calendar20/TopBar.tsx @@ -0,0 +1,135 @@ +import React from 'react'; +import { View, Text, TouchableOpacity, StyleSheet, Platform, StatusBar } from 'react-native'; +import { Ionicons } from '@expo/vector-icons'; +import { CalendarViewType } from './types'; +import { useTranslation } from 'react-i18next'; +import dayjs from 'dayjs'; + +interface TopBarProps { + currentDate: dayjs.Dayjs; + viewType: CalendarViewType; + onMenuPress: () => void; + 
onSearchPress: () => void; + onTodayPress: () => void; + onTitlePress: () => void; + onClose?: () => void; +} + +const TopBar: React.FC = ({ + currentDate, + viewType, + onMenuPress, + onSearchPress, + onTodayPress, + onTitlePress, + onClose, +}) => { + const { t } = useTranslation(); + + const getTitle = (): string => { + switch (viewType) { + case 'month': + return currentDate.format('MMMM YYYY'); + case 'week': + case '3day': { + const start = currentDate.startOf('week'); + const end = start.add(6, 'day'); + if (start.month() === end.month()) { + return `${start.format('D')} - ${end.format('D MMM YYYY')}`; + } + return `${start.format('D MMM')} - ${end.format('D MMM YYYY')}`; + } + case 'day': + return currentDate.format('ddd, D MMMM YYYY'); + case 'agenda': + return currentDate.format('MMMM YYYY'); + default: + return currentDate.format('MMMM YYYY'); + } + }; + + const isToday = currentDate.isSame(dayjs(), 'day'); + + return ( + + {onClose ? ( + + + + ) : ( + + + + )} + + + {getTitle()} + + + + + + + + {!isToday && ( + + {t('calendar20.today')} + + )} + + + ); +}; + +const styles = StyleSheet.create({ + container: { + flexDirection: 'row', + alignItems: 'center', + paddingTop: 8, + paddingBottom: 12, + paddingHorizontal: 20, + backgroundColor: '#ffffff', + borderBottomWidth: StyleSheet.hairlineWidth, + borderBottomColor: '#e1e5e9', + }, + iconButton: { + padding: 6, + }, + titleContainer: { + flex: 1, + flexDirection: 'row', + alignItems: 'center', + marginLeft: 14, + }, + title: { + fontSize: 26, + fontWeight: '200', + color: '#000000', + fontFamily: 'System', + letterSpacing: -1, + }, + chevron: { + marginLeft: 6, + marginTop: 2, + }, + rightActions: { + flexDirection: 'row', + alignItems: 'center', + gap: 10, + }, + todayButton: { + paddingHorizontal: 18, + paddingVertical: 8, + borderRadius: 20, + borderWidth: 1, + borderColor: '#000000', + }, + todayText: { + fontSize: 15, + fontWeight: '400', + color: '#000000', + fontFamily: 'System', + }, +}); + +export 
default React.memo(TopBar); diff --git a/src/components/Calendar20/ViewDrawer.tsx b/src/components/Calendar20/ViewDrawer.tsx new file mode 100644 index 0000000..81e3702 --- /dev/null +++ b/src/components/Calendar20/ViewDrawer.tsx @@ -0,0 +1,230 @@ +import React from 'react'; +import { + View, + Text, + TouchableOpacity, + Modal, + StyleSheet, + Pressable, + ScrollView, +} from 'react-native'; +import { Ionicons } from '@expo/vector-icons'; +import { CalendarViewType } from './types'; +import CategoryColorService from './categoryColors'; +import { useTranslation } from 'react-i18next'; + +interface ViewDrawerProps { + visible: boolean; + currentView: CalendarViewType; + categories: any[]; + enabledCategories: Set; + colorService: CategoryColorService; + onViewChange: (view: CalendarViewType) => void; + onCategoryToggle: (categoryName: string) => void; + onShowAll: () => void; + onClose: () => void; +} + +const VIEW_OPTIONS: { key: CalendarViewType; icon: keyof typeof Ionicons.glyphMap; labelKey: string }[] = [ + { key: 'month', icon: 'grid-outline', labelKey: 'calendar20.views.month' }, + { key: 'week', icon: 'calendar-outline', labelKey: 'calendar20.views.week' }, + { key: '3day', icon: 'albums-outline', labelKey: 'calendar20.views.threeDay' }, + { key: 'day', icon: 'today-outline', labelKey: 'calendar20.views.day' }, + { key: 'agenda', icon: 'list-outline', labelKey: 'calendar20.views.agenda' }, +]; + +const ViewDrawer: React.FC = ({ + visible, + currentView, + categories, + enabledCategories, + colorService, + onViewChange, + onCategoryToggle, + onShowAll, + onClose, +}) => { + const { t } = useTranslation(); + + if (!visible) return null; + + return ( + + + e.stopPropagation()}> + + {/* Views section */} + {t('calendar20.drawer.views').toUpperCase()} + {VIEW_OPTIONS.map(opt => { + const isActive = currentView === opt.key; + return ( + onViewChange(opt.key)} + > + + + {t(opt.labelKey)} + + {isActive && ( + + )} + + ); + })} + + {/* Categories section */} + 
{categories.length > 0 && ( + <> + + {t('calendar20.drawer.categories').toUpperCase()} + + + + {t('calendar20.drawer.showAll')} + + {enabledCategories.size === 0 && ( + + )} + + + {categories.map((cat, i) => { + const name = cat.name || cat.category_name || ''; + const color = colorService.getColor(name); + const key = name.toLowerCase().trim(); + const isEnabled = enabledCategories.size === 0 || enabledCategories.has(key); + + return ( + onCategoryToggle(name)} + > + + {isEnabled && ( + + )} + + + {name} + + + ); + })} + + )} + + + + + ); +}; + +const styles = StyleSheet.create({ + backdrop: { + flex: 1, + backgroundColor: 'rgba(0,0,0,0.4)', + flexDirection: 'row', + }, + drawer: { + width: 280, + backgroundColor: '#ffffff', + paddingVertical: 20, + paddingHorizontal: 20, + shadowColor: '#000', + shadowOffset: { width: 2, height: 0 }, + shadowOpacity: 0.08, + shadowRadius: 12, + elevation: 3, + borderTopRightRadius: 24, + borderBottomRightRadius: 24, + }, + sectionTitle: { + fontSize: 13, + fontWeight: '500', + color: '#999999', + fontFamily: 'System', + letterSpacing: 0.5, + marginBottom: 8, + marginTop: 8, + }, + viewOption: { + flexDirection: 'row', + alignItems: 'center', + paddingVertical: 12, + paddingHorizontal: 12, + borderRadius: 12, + marginBottom: 2, + }, + activeViewOption: { + backgroundColor: '#f0f0f0', + }, + viewLabel: { + fontSize: 16, + fontWeight: '400', + color: '#333333', + fontFamily: 'System', + marginLeft: 14, + flex: 1, + }, + activeViewLabel: { + color: '#000000', + fontWeight: '500', + }, + checkmark: { + marginLeft: 'auto', + }, + divider: { + height: StyleSheet.hairlineWidth, + backgroundColor: '#e1e5e9', + marginVertical: 16, + }, + showAllButton: { + flexDirection: 'row', + alignItems: 'center', + justifyContent: 'space-between', + paddingVertical: 10, + paddingHorizontal: 12, + marginBottom: 4, + }, + showAllText: { + fontSize: 15, + fontWeight: '400', + color: '#666666', + fontFamily: 'System', + }, + activeShowAll: { + color: 
'#000000', + fontWeight: '500', + }, + categoryRow: { + flexDirection: 'row', + alignItems: 'center', + paddingVertical: 10, + paddingHorizontal: 12, + }, + colorSquare: { + width: 22, + height: 22, + borderRadius: 6, + alignItems: 'center', + justifyContent: 'center', + marginRight: 12, + }, + categoryName: { + fontSize: 15, + fontWeight: '400', + color: '#333333', + fontFamily: 'System', + }, + disabledCategory: { + color: '#cccccc', + }, +}); + +export default React.memo(ViewDrawer); diff --git a/src/components/Calendar20/ViewSelector.tsx b/src/components/Calendar20/ViewSelector.tsx new file mode 100644 index 0000000..6be73d0 --- /dev/null +++ b/src/components/Calendar20/ViewSelector.tsx @@ -0,0 +1,336 @@ +import React, { useEffect, useRef, useState } from 'react'; +import { + View, + Text, + TouchableOpacity, + Modal, + StyleSheet, + Pressable, + ScrollView, + Animated, +} from 'react-native'; +import { Ionicons } from '@expo/vector-icons'; +import { CalendarViewType } from './types'; +import CategoryColorService from './categoryColors'; +import { useTranslation } from 'react-i18next'; + +interface ViewSelectorProps { + visible: boolean; + currentView: CalendarViewType; + categories: any[]; + enabledCategories: Set; + colorService: CategoryColorService; + onViewChange: (view: CalendarViewType) => void; + onCategoryToggle: (categoryName: string) => void; + onShowAll: () => void; + onClose: () => void; +} + +const VIEW_OPTIONS: { key: CalendarViewType; icon: keyof typeof Ionicons.glyphMap; labelKey: string }[] = [ + { key: 'month', icon: 'grid-outline', labelKey: 'calendar20.views.month' }, + { key: 'week', icon: 'calendar-outline', labelKey: 'calendar20.views.week' }, + { key: '3day', icon: 'albums-outline', labelKey: 'calendar20.views.threeDay' }, + { key: 'day', icon: 'today-outline', labelKey: 'calendar20.views.day' }, + { key: 'agenda', icon: 'list-outline', labelKey: 'calendar20.views.agenda' }, +]; + +const ViewSelector: React.FC = ({ + visible, + 
currentView, + categories, + enabledCategories, + colorService, + onViewChange, + onCategoryToggle, + onShowAll, + onClose, +}) => { + const { t } = useTranslation(); + const [modalVisible, setModalVisible] = useState(false); + const slideAnim = useRef(new Animated.Value(300)).current; + const fadeAnim = useRef(new Animated.Value(0)).current; + + useEffect(() => { + if (visible) { + setModalVisible(true); + Animated.parallel([ + Animated.spring(slideAnim, { + toValue: 0, + useNativeDriver: true, + tension: 65, + friction: 10, + }), + Animated.timing(fadeAnim, { + toValue: 1, + duration: 250, + useNativeDriver: true, + }), + ]).start(); + } else if (modalVisible) { + Animated.parallel([ + Animated.timing(slideAnim, { + toValue: 300, + duration: 250, + useNativeDriver: true, + }), + Animated.timing(fadeAnim, { + toValue: 0, + duration: 250, + useNativeDriver: true, + }), + ]).start(() => { + setModalVisible(false); + }); + } + }, [visible, modalVisible, slideAnim, fadeAnim]); + + const handleClose = () => { + onClose(); + }; + + if (!modalVisible) return null; + + return ( + + + + + + {t('calendar20.drawer.settings')} + + + + + + + {/* Views section */} + {t('calendar20.drawer.views').toUpperCase()} + + {VIEW_OPTIONS.map(opt => { + const isActive = currentView === opt.key; + return ( + onViewChange(opt.key)} + > + + + {t(opt.labelKey)} + + + ); + })} + + + {/* Categories section */} + {categories.length > 0 && ( + <> + + {t('calendar20.drawer.categories').toUpperCase()} + + + + {t('calendar20.drawer.showAll')} + + {enabledCategories.size === 0 && ( + + )} + + + + {categories.map((cat, i) => { + const name = cat.name || cat.category_name || ''; + const color = colorService.getColor(name); + const key = name.toLowerCase().trim(); + const isEnabled = enabledCategories.size === 0 || enabledCategories.has(key); + + return ( + onCategoryToggle(name)} + > + + + {name} + + {isEnabled && ( + + )} + + ); + })} + + + )} + + + + + ); +}; + +const styles = StyleSheet.create({ + 
backdrop: { + flex: 1, + backgroundColor: 'rgba(0,0,0,0.5)', + justifyContent: 'flex-end', + alignItems: 'stretch', + }, + panel: { + backgroundColor: '#ffffff', + borderTopLeftRadius: 24, + borderTopRightRadius: 24, + maxHeight: '80%', + paddingBottom: 40, + shadowColor: '#000', + shadowOffset: { width: 0, height: -4 }, + shadowOpacity: 0.1, + shadowRadius: 16, + elevation: 8, + }, + header: { + flexDirection: 'row', + alignItems: 'center', + justifyContent: 'space-between', + paddingHorizontal: 24, + paddingTop: 24, + paddingBottom: 16, + borderBottomWidth: StyleSheet.hairlineWidth, + borderBottomColor: '#e1e5e9', + }, + headerTitle: { + fontSize: 22, + fontWeight: '200', + color: '#000000', + fontFamily: 'System', + letterSpacing: -1, + }, + closeButton: { + padding: 4, + }, + scrollContent: { + paddingHorizontal: 24, + paddingTop: 20, + }, + sectionTitle: { + fontSize: 12, + fontWeight: '500', + color: '#999999', + fontFamily: 'System', + letterSpacing: 0.8, + marginBottom: 16, + marginTop: 8, + }, + viewGrid: { + flexDirection: 'row', + flexWrap: 'wrap', + gap: 12, + marginBottom: 8, + }, + viewCard: { + width: '30%', + aspectRatio: 1.2, + backgroundColor: '#f8f9fa', + borderRadius: 16, + alignItems: 'center', + justifyContent: 'center', + borderWidth: 1.5, + borderColor: '#e1e5e9', + paddingVertical: 12, + gap: 8, + }, + activeViewCard: { + backgroundColor: '#f0f7ff', + borderColor: '#007AFF', + }, + viewLabel: { + fontSize: 14, + fontWeight: '400', + color: '#666666', + fontFamily: 'System', + }, + activeViewLabel: { + color: '#007AFF', + fontWeight: '500', + }, + divider: { + height: StyleSheet.hairlineWidth, + backgroundColor: '#e1e5e9', + marginVertical: 24, + }, + showAllButton: { + flexDirection: 'row', + alignItems: 'center', + justifyContent: 'space-between', + paddingVertical: 12, + paddingHorizontal: 16, + backgroundColor: '#f8f9fa', + borderRadius: 12, + marginBottom: 16, + }, + showAllText: { + fontSize: 15, + fontWeight: '400', + color: 
'#666666', + fontFamily: 'System', + }, + activeShowAll: { + color: '#007AFF', + fontWeight: '500', + }, + categoriesGrid: { + flexDirection: 'row', + flexWrap: 'wrap', + gap: 10, + }, + categoryCard: { + flexDirection: 'row', + alignItems: 'center', + backgroundColor: '#ffffff', + paddingHorizontal: 12, + paddingVertical: 10, + borderRadius: 12, + borderWidth: 1, + borderColor: '#e1e5e9', + minWidth: '45%', + maxWidth: '48%', + }, + disabledCategoryCard: { + backgroundColor: '#f8f9fa', + opacity: 0.6, + }, + colorDot: { + width: 12, + height: 12, + borderRadius: 6, + marginRight: 8, + }, + categoryName: { + fontSize: 14, + fontWeight: '400', + color: '#333333', + fontFamily: 'System', + flex: 1, + }, + disabledCategory: { + color: '#999999', + }, + categoryCheck: { + marginLeft: 4, + }, +}); + +export default React.memo(ViewSelector); diff --git a/src/components/Calendar20/WeekView.tsx b/src/components/Calendar20/WeekView.tsx new file mode 100644 index 0000000..614c1bd --- /dev/null +++ b/src/components/Calendar20/WeekView.tsx @@ -0,0 +1,412 @@ +import React, { useMemo, useRef, useEffect, useState } from 'react'; +import { + View, + Text, + ScrollView, + StyleSheet, + Dimensions, + PanResponder, + TouchableOpacity, +} from 'react-native'; +import dayjs from 'dayjs'; +import isoWeek from 'dayjs/plugin/isoWeek'; +import { CalendarTask, OverlapColumn } from './types'; +import TimeBlock from './TimeBlock'; +import EventChip from './EventChip'; +import { useTranslation } from 'react-i18next'; + +dayjs.extend(isoWeek); + +const TIME_LABEL_WIDTH = 44; +const { width: SCREEN_WIDTH } = Dimensions.get('window'); +const COLUMN_WIDTH = (SCREEN_WIDTH - TIME_LABEL_WIDTH - 16) / 7; +const MIN_HOUR_HEIGHT = 40; +const MAX_HOUR_HEIGHT = 140; +const DEFAULT_HOUR_HEIGHT = 64; + +interface WeekViewProps { + currentDate: dayjs.Dayjs; + tasks: CalendarTask[]; + onDatePress: (date: dayjs.Dayjs) => void; + onTaskPress: (task: CalendarTask) => void; + onToggleComplete: (task: 
CalendarTask) => void; + onSwipeLeft: () => void; + onSwipeRight: () => void; +} + +function computeOverlapColumns(tasks: CalendarTask[]): OverlapColumn[] { + if (tasks.length === 0) return []; + const sorted = [...tasks].sort((a, b) => { + const diff = a.startDayjs.valueOf() - b.startDayjs.valueOf(); + return diff !== 0 ? diff : b.durationMinutes - a.durationMinutes; + }); + const columns: OverlapColumn[] = []; + const endTimes: number[] = []; + for (const task of sorted) { + const start = task.startDayjs.valueOf(); + let placed = false; + for (let col = 0; col < endTimes.length; col++) { + if (start >= endTimes[col]) { + endTimes[col] = task.endDayjs.valueOf(); + columns.push({ task, column: col, totalColumns: 0 }); + placed = true; + break; + } + } + if (!placed) { + endTimes.push(task.endDayjs.valueOf()); + columns.push({ task, column: endTimes.length - 1, totalColumns: 0 }); + } + } + for (let i = 0; i < columns.length; i++) { + const e = columns[i]; + const s = e.task.startDayjs.valueOf(); + const en = e.task.endDayjs.valueOf(); + let maxCol = e.column; + for (let j = 0; j < columns.length; j++) { + if (i === j) continue; + const o = columns[j]; + if (o.task.startDayjs.valueOf() < en && o.task.endDayjs.valueOf() > s) { + maxCol = Math.max(maxCol, o.column); + } + } + e.totalColumns = maxCol + 1; + } + return columns; +} + +const WeekView: React.FC = ({ + currentDate, + tasks, + onDatePress, + onTaskPress, + onToggleComplete, + onSwipeLeft, + onSwipeRight, +}) => { + const { t } = useTranslation(); + const scrollRef = useRef(null); + const [hourHeight, setHourHeight] = useState(DEFAULT_HOUR_HEIGHT); + + // Pinch-to-zoom + const initialDistance = useRef(0); + const initialHeight = useRef(DEFAULT_HOUR_HEIGHT); + + const panResponder = useRef( + PanResponder.create({ + onMoveShouldSetPanResponder: (evt, gs) => { + // 2-finger pinch + if (evt.nativeEvent.touches?.length === 2) return true; + // Horizontal swipe + return Math.abs(gs.dx) > 20 && Math.abs(gs.dy) < 
20; + }, + onPanResponderGrant: (evt) => { + if (evt.nativeEvent.touches?.length === 2) { + const t1 = evt.nativeEvent.touches[0]; + const t2 = evt.nativeEvent.touches[1]; + initialDistance.current = Math.hypot(t2.pageX - t1.pageX, t2.pageY - t1.pageY); + initialHeight.current = hourHeight; + } + }, + onPanResponderMove: (evt) => { + if (evt.nativeEvent.touches?.length === 2) { + const t1 = evt.nativeEvent.touches[0]; + const t2 = evt.nativeEvent.touches[1]; + const dist = Math.hypot(t2.pageX - t1.pageX, t2.pageY - t1.pageY); + const scale = dist / (initialDistance.current || 1); + const newHeight = Math.min(MAX_HOUR_HEIGHT, Math.max(MIN_HOUR_HEIGHT, initialHeight.current * scale)); + setHourHeight(newHeight); + } + }, + onPanResponderRelease: (_, gs) => { + if (Math.abs(gs.dx) > 60 && Math.abs(gs.dy) < 40) { + if (gs.dx > 0) onSwipeRight(); + else onSwipeLeft(); + } + }, + }) + ).current; + + // Week days + const weekDays = useMemo(() => { + const startOfWeek = currentDate.startOf('isoWeek'); + return Array.from({ length: 7 }, (_, i) => startOfWeek.add(i, 'day')); + }, [currentDate]); + + // All-day tasks per day + const allDayTasksByDay = useMemo(() => { + return weekDays.map(day => { + return tasks.filter(task => { + if (!task.isAllDay) return false; + return ( + day.isSame(task.startDayjs, 'day') || + day.isSame(task.endDayjs, 'day') || + (day.isAfter(task.startDayjs, 'day') && day.isBefore(task.endDayjs, 'day')) + ); + }); + }); + }, [weekDays, tasks]); + + const hasAllDay = allDayTasksByDay.some(d => d.length > 0); + + // Timed tasks per day + const timedTasksByDay = useMemo(() => { + return weekDays.map(day => { + const dayTasks = tasks.filter(task => { + if (task.isAllDay) return false; + return ( + day.isSame(task.startDayjs, 'day') || + day.isSame(task.endDayjs, 'day') || + (day.isAfter(task.startDayjs, 'day') && day.isBefore(task.endDayjs, 'day')) + ); + }); + return computeOverlapColumns(dayTasks); + }); + }, [weekDays, tasks]); + + // Scroll to current 
time + useEffect(() => { + const now = dayjs(); + const offset = Math.max(0, (now.hour() - 1) * hourHeight); + setTimeout(() => scrollRef.current?.scrollTo({ y: offset, animated: false }), 100); + }, [currentDate, hourHeight]); + + const now = dayjs(); + const hours = Array.from({ length: 24 }, (_, i) => i); + + return ( + + {/* Day headers */} + + + {weekDays.map((day, i) => { + const isToday = day.isSame(now, 'day'); + return ( + + + {day.format('ddd').toUpperCase()} + + + + {day.date()} + + + + ); + })} + + + {/* All-day strip */} + {hasAllDay && ( + + + {t('calendar20.allDay')} + + {weekDays.map((day, i) => ( + + {allDayTasksByDay[i].map(task => ( + + ))} + + ))} + + )} + + {/* Time grid */} + + + {/* Time labels */} + + {hours.map(hour => ( + + {hour.toString().padStart(2, '0')}:00 + + ))} + + + {/* Day columns */} + {weekDays.map((day, dayIndex) => { + const isToday = day.isSame(now, 'day'); + const currentTimeTop = isToday ? (now.hour() + now.minute() / 60) * hourHeight : -1; + + return ( + + {/* Clickable time slots (every 30 minutes) */} + {hours.map(hour => [0, 30].map(minute => { + const slotTime = day.hour(hour).minute(minute).second(0); + return ( + onDatePress(slotTime)} + /> + ); + }))} + + {/* Hour lines */} + {hours.map(hour => ( + + ))} + + {/* Time blocks */} + {timedTasksByDay[dayIndex].map(({ task, column, totalColumns }) => ( + + ))} + + {/* Current time */} + {isToday && currentTimeTop >= 0 && ( + + + + + )} + + ); + })} + + + + ); +}; + +const styles = StyleSheet.create({ + container: { + flex: 1, + }, + headerRow: { + flexDirection: 'row', + paddingHorizontal: 8, + paddingVertical: 10, + borderBottomWidth: StyleSheet.hairlineWidth, + borderBottomColor: '#e1e5e9', + }, + dayHeader: { + alignItems: 'center', + }, + dayName: { + fontSize: 12, + fontWeight: '400', + color: '#666666', + fontFamily: 'System', + }, + todayColor: { + color: '#000000', + }, + dateCircle: { + width: 28, + height: 28, + borderRadius: 14, + alignItems: 'center', + 
justifyContent: 'center', + marginTop: 4, + }, + todayCircle: { + backgroundColor: '#000000', + }, + dateNum: { + fontSize: 16, + fontWeight: '400', + color: '#000000', + fontFamily: 'System', + }, + todayDateNum: { + color: '#ffffff', + fontWeight: '600', + }, + allDayRow: { + flexDirection: 'row', + paddingHorizontal: 8, + paddingVertical: 6, + borderBottomWidth: StyleSheet.hairlineWidth, + borderBottomColor: '#e1e5e9', + backgroundColor: '#fafafa', + }, + allDayLabel: { + justifyContent: 'center', + }, + allDayLabelText: { + fontSize: 12, + color: '#666666', + fontFamily: 'System', + }, + allDayCell: { + paddingHorizontal: 1, + }, + scrollContainer: { + flex: 1, + }, + gridContainer: { + flexDirection: 'row', + paddingHorizontal: 8, + }, + dayColumn: { + position: 'relative', + borderLeftWidth: StyleSheet.hairlineWidth, + borderLeftColor: '#f0f0f0', + }, + clickableSlot: { + position: 'absolute', + left: 0, + right: 0, + backgroundColor: 'transparent', + }, + hourLine: { + position: 'absolute', + left: 0, + right: 0, + height: StyleSheet.hairlineWidth, + backgroundColor: '#e1e5e9', + }, + timeLabel: { + fontSize: 11, + color: '#999999', + fontFamily: 'System', + marginTop: -5, + }, + currentTimeLine: { + position: 'absolute', + left: -2, + right: 0, + flexDirection: 'row', + alignItems: 'center', + zIndex: 10, + }, + currentTimeDot: { + width: 6, + height: 6, + borderRadius: 3, + backgroundColor: '#000000', + }, + currentTimeBar: { + flex: 1, + height: 1.5, + backgroundColor: '#000000', + }, +}); + +export default React.memo(WeekView); diff --git a/src/components/Calendar20/categoryColors.ts b/src/components/Calendar20/categoryColors.ts new file mode 100644 index 0000000..c89ff2a --- /dev/null +++ b/src/components/Calendar20/categoryColors.ts @@ -0,0 +1,84 @@ +import AsyncStorage from '@react-native-async-storage/async-storage'; + +const COLOR_POOL = [ + '#007AFF', // App Blue + '#34A853', // Green + '#EA4335', // Red + '#A142F4', // Purple + '#F4A125', // 
Orange + '#00ACC1', // Teal + '#E91E63', // Pink + '#795548', // Brown + '#607D8B', // Blue Grey + '#FF7043', // Deep Orange + '#66BB6A', // Light Green + '#AB47BC', // Medium Purple +]; + +const STORAGE_KEY = '@calendar20_category_colors'; + +class CategoryColorService { + private static instance: CategoryColorService; + private colorMap: Record = {}; + private loaded = false; + + static getInstance(): CategoryColorService { + if (!CategoryColorService.instance) { + CategoryColorService.instance = new CategoryColorService(); + } + return CategoryColorService.instance; + } + + async load(): Promise { + if (this.loaded) return; + try { + const stored = await AsyncStorage.getItem(STORAGE_KEY); + if (stored) { + this.colorMap = JSON.parse(stored); + } + } catch { + this.colorMap = {}; + } + this.loaded = true; + } + + private async save(): Promise { + try { + await AsyncStorage.setItem(STORAGE_KEY, JSON.stringify(this.colorMap)); + } catch { + // silent fail + } + } + + getColor(categoryName: string): string { + if (!categoryName) return COLOR_POOL[0]; + + const key = categoryName.toLowerCase().trim(); + if (this.colorMap[key]) { + return this.colorMap[key]; + } + + // Assign next available color from pool + const usedColors = new Set(Object.values(this.colorMap)); + let color = COLOR_POOL.find(c => !usedColors.has(c)); + if (!color) { + // All colors used, cycle based on hash + let hash = 0; + for (let i = 0; i < key.length; i++) { + hash = ((hash << 5) - hash + key.charCodeAt(i)) | 0; + } + color = COLOR_POOL[Math.abs(hash) % COLOR_POOL.length]; + } + + this.colorMap[key] = color; + this.save(); + return color; + } + + assignColors(categoryNames: string[]): void { + categoryNames.forEach(name => this.getColor(name)); + } +} + +export default CategoryColorService; +export { COLOR_POOL }; diff --git a/src/components/Calendar20/types.ts b/src/components/Calendar20/types.ts new file mode 100644 index 0000000..9acad4f --- /dev/null +++ 
b/src/components/Calendar20/types.ts @@ -0,0 +1,42 @@ +import { Task } from '../../services/taskService'; +import dayjs from 'dayjs'; + +export type CalendarViewType = 'month' | 'week' | '3day' | 'day' | 'agenda'; + +export interface CalendarTask extends Task { + displayColor: string; + startDayjs: dayjs.Dayjs; + endDayjs: dayjs.Dayjs; + durationMinutes: number; + isMultiDay: boolean; + isAllDay: boolean; +} + +export interface DayData { + date: dayjs.Dayjs; + dateString: string; + isCurrentMonth: boolean; + isToday: boolean; + tasks: CalendarTask[]; +} + +export interface WeekData { + days: DayData[]; +} + +export interface TimeSlot { + hour: number; + tasks: CalendarTask[]; +} + +export interface OverlapColumn { + task: CalendarTask; + column: number; + totalColumns: number; +} + +export interface CategoryColor { + categoryName: string; + categoryId?: string | number; + color: string; +} diff --git a/src/hooks/useVoiceChat.ts b/src/hooks/useVoiceChat.ts index ab65be5..c657563 100644 --- a/src/hooks/useVoiceChat.ts +++ b/src/hooks/useVoiceChat.ts @@ -1,20 +1,21 @@ import { useState, useRef, useCallback, useEffect } from 'react'; -import { VoiceBotWebSocket, VoiceChatCallbacks } from '../services/botservice'; -import { AudioRecorder, AudioPlayer, checkAudioPermissions, VADCallbacks } from '../utils/audioUtils'; -import { debugAudioDependencies } from '../utils/audioDebug'; +import { VoiceBotWebSocket, VoiceChatCallbacks, VoiceServerPhase } from '../services/voiceBotService'; +import { AudioRecorder, AudioPlayer, checkAudioPermissions, base64ToArrayBuffer } from '../utils/audioUtils'; /** * Stati possibili della chat vocale */ -export type VoiceChatState = - | 'idle' // Inattivo - | 'connecting' // Connessione in corso - | 'connected' // Connesso e pronto - | 'recording' // Registrazione audio utente - | 'processing' // Elaborazione server (trascrizione/IA) - | 'speaking' // Riproduzione risposta bot - | 'error' // Stato di errore - | 'disconnected'; // Disconnesso 
+export type VoiceChatState = + | 'idle' // Inattivo + | 'connecting' // Connessione WebSocket in corso + | 'authenticating' // Autenticazione in corso + | 'setting_up' // Server sta configurando MCP + RealtimeAgent + | 'ready' // Pronto per ricevere input + | 'recording' // Registrazione audio utente + | 'processing' // Agent sta elaborando + | 'speaking' // Riproduzione risposta audio + | 'error' // Stato di errore + | 'disconnected'; // Disconnesso /** * Informazioni sullo stato del server @@ -24,9 +25,28 @@ export interface ServerStatus { message: string; } +/** + * Trascrizione di un messaggio vocale + */ +export interface VoiceTranscript { + role: 'user' | 'assistant'; + content: string; +} + +/** + * Tool in esecuzione + */ +export interface ActiveTool { + name: string; + args: string; + status: 'running' | 'complete'; + output?: string; +} + /** * Hook personalizzato per la gestione della chat vocale - * Integra WebSocket, registrazione audio, e riproduzione + * Compatibile con l'OpenAI Realtime API tramite WebSocket + * Usa @picovoice/react-native-voice-processor per streaming PCM16 base64 in tempo reale a 24kHz */ export function useVoiceChat() { // Stati principali @@ -35,25 +55,22 @@ export function useVoiceChat() { const [serverStatus, setServerStatus] = useState(null); const [recordingDuration, setRecordingDuration] = useState(0); const [hasPermissions, setHasPermissions] = useState(false); + const [chunksReceived, setChunksReceived] = useState(0); + const [isMuted, setIsMuted] = useState(false); + + // Trascrizioni e tool + const [transcripts, setTranscripts] = useState([]); + const [activeTools, setActiveTools] = useState([]); // Refs per gestire le istanze const websocketRef = useRef(null); const audioRecorderRef = useRef(null); const audioPlayerRef = useRef(null); const recordingIntervalRef = useRef(null); - const lastChunkIndexRef = useRef(null); - - const [isReceivingAudio, setIsReceivingAudio] = useState(false); - const [chunksReceived, 
setChunksReceived] = useState(0); - - // Chunk timing diagnostics - const lastChunkTimeRef = useRef(0); - const chunkTimingsRef = useRef([]); - - // VAD states (sempre attivo di default) - const [vadEnabled, setVadEnabled] = useState(true); - const [audioLevel, setAudioLevel] = useState(-160); - const [isSpeechActive, setIsSpeechActive] = useState(false); + const shouldAutoStartRecordingRef = useRef(false); + const agentEndedRef = useRef(true); // true = agent ha finito, possiamo registrare + const isMutedRef = useRef(false); + const isManuallyMutedRef = useRef(false); // Distingue tra mute manuale e automatico /** * Verifica e richiede i permessi audio @@ -62,12 +79,12 @@ export function useVoiceChat() { try { const granted = await checkAudioPermissions(); setHasPermissions(granted); - + if (!granted) { setError('Permessi microfono richiesti per la chat vocale'); setState('error'); } - + return granted; } catch (err) { console.error('Errore richiesta permessi:', err); @@ -77,164 +94,261 @@ export function useVoiceChat() { } }, []); - - /** - * Ref per gestire l'avvio automatico della registrazione dopo autenticazione - */ - const shouldAutoStartRecordingRef = useRef(false); - /** * Callback per gestire i messaggi WebSocket */ const websocketCallbacks: VoiceChatCallbacks = { onConnectionOpen: () => { - console.log('🎤 WebSocket connesso, in attesa di autenticazione...'); - // Non impostare 'connected' qui, aspetta l'autenticazione + setState('authenticating'); setError(null); }, onAuthenticationSuccess: (message: string) => { - console.log('✅ Autenticazione completata:', message); - setState('connected'); + console.log('Autenticazione completata:', message); + setState('setting_up'); + }, + + onReady: () => { + console.log('Sessione vocale pronta'); + setState('ready'); - // Avvia la registrazione se richiesto - if (shouldAutoStartRecordingRef.current) { - console.log('🎤 Avvio registrazione automatica post-autenticazione...'); + // Avvia la registrazione 
automaticamente se richiesto e non mutato + if (shouldAutoStartRecordingRef.current && !isMutedRef.current) { shouldAutoStartRecordingRef.current = false; setTimeout(() => { startRecording(); - }, 100); + }, 500); + } else if (isMutedRef.current) { + shouldAutoStartRecordingRef.current = false; } }, - onAuthenticationFailed: (error: string) => { - console.error('❌ Autenticazione fallita:', error); - setError(`Autenticazione fallita: ${error}`); + onAuthenticationFailed: (errorMsg: string) => { + console.error('Autenticazione fallita:', errorMsg); + setError(`Autenticazione fallita: ${errorMsg}`); setState('error'); }, onConnectionClose: () => { - console.log('🎤 WebSocket disconnesso'); + console.log('WebSocket disconnesso - cleanup in corso'); setState('disconnected'); shouldAutoStartRecordingRef.current = false; + + // Ferma la registrazione se attiva per evitare invio audio su connessione morta + if (audioRecorderRef.current?.isCurrentlyRecording()) { + audioRecorderRef.current.cancelRecording().catch(err => { + console.error('Errore fermando registrazione su disconnessione:', err); + }); + } + + // Pulisci il timer della durata + if (recordingIntervalRef.current) { + clearInterval(recordingIntervalRef.current); + recordingIntervalRef.current = null; + } }, - onStatus: (phase: string, message: string) => { - console.log(`📡 Status Server: ${phase} - ${message}`); + onStatus: (phase: VoiceServerPhase, message: string) => { + console.log(`[useVoiceChat] onStatus: phase=${phase}, message=${message}`); setServerStatus({ phase, message }); switch (phase) { - case 'receiving_audio': - // Audio ricevuto dal server - console.log('📥 Server sta ricevendo audio...'); - lastChunkIndexRef.current = null; + case 'speech_started': + // Utente ha iniziato a parlare (VAD di OpenAI) + console.log('[useVoiceChat] 🎤 SPEECH_STARTED: Utente sta parlando'); + setState('recording'); break; - case 'transcription': - case 'transcription_complete': - case 'ai_processing': - case 
'ai_complete': - case 'tts_generation': - case 'tts_complete': + + case 'speech_stopped': + // Utente ha finito di parlare (VAD di OpenAI) + // IMPORTANTE: Fermiamo il microfono QUI e non lo riattiveremo + // finché l'agent non ha completato TUTTO (elaborazione + riproduzione) + console.log('[useVoiceChat] 🛑 SPEECH_STOPPED: Utente ha finito di parlare - auto-mute attivo'); + + // Auto-mute: ferma la registrazione + if (audioRecorderRef.current?.isCurrentlyRecording()) { + audioRecorderRef.current.stopRecording().catch(err => { + console.error('Errore fermando registrazione su speech_stopped:', err); + }); + if (recordingIntervalRef.current) { + clearInterval(recordingIntervalRef.current); + recordingIntervalRef.current = null; + } + setRecordingDuration(0); + } + + // Aggiorna UI del mute (solo se non è mutato manualmente) + if (!isManuallyMutedRef.current) { + setIsMuted(true); + isMutedRef.current = true; + } + setState('processing'); break; - case 'audio_streaming': - setIsReceivingAudio(true); - setChunksReceived(0); - lastChunkIndexRef.current = null; - if (audioPlayerRef.current) { - audioPlayerRef.current.clearChunks(); + + case 'agent_start': + console.log('[useVoiceChat] Agent iniziato - assicuro auto-mute'); + setState('processing'); + agentEndedRef.current = false; // Agent sta elaborando + + // Auto-mute (safety check): assicuriamoci che il microfono sia fermato + if (audioRecorderRef.current?.isCurrentlyRecording()) { + audioRecorderRef.current.stopRecording().catch(err => { + console.error('Errore fermando registrazione su agent_start:', err); + }); + if (recordingIntervalRef.current) { + clearInterval(recordingIntervalRef.current); + recordingIntervalRef.current = null; + } + setRecordingDuration(0); + } + + // Aggiorna UI del mute (solo se non è mutato manualmente) + if (!isManuallyMutedRef.current) { + setIsMuted(true); + isMutedRef.current = true; } - setState('speaking'); break; - case 'complete': - console.log('✅ Pipeline completa!'); - 
setIsReceivingAudio(false); - lastChunkIndexRef.current = null; - // Reset chunk timing per prossimo ciclo - lastChunkTimeRef.current = 0; - chunkTimingsRef.current = []; + case 'agent_end': + // Agent ha finito di elaborare + console.log('[useVoiceChat] Agent terminato'); + agentEndedRef.current = true; - if (audioPlayerRef.current && audioPlayerRef.current.getChunksCount() > 0) { - const bufferedCount = audioPlayerRef.current.getBufferedChunksCount(); - console.log(`🔊 Ricevuti ${bufferedCount} chunk totali. Avvio riproduzione sequenziale...`); + // IMPORTANTE: Non riattivare il microfono se: + // 1. Ci sono chunk audio in coda da riprodurre + // 2. L'audio player sta ATTIVAMENTE riproducendo + const hasQueuedChunks = audioPlayerRef.current && audioPlayerRef.current.getChunksCount() > 0; + const isPlaying = audioPlayerRef.current?.isCurrentlyPlaying(); - setState('speaking'); + if (!hasQueuedChunks && !isPlaying) { + console.log('[useVoiceChat] Nessun audio in riproduzione, auto-unmute'); + setState('ready'); + + // Auto-unmute: riattiva microfono (solo se non mutato manualmente) + if (!isManuallyMutedRef.current) { + setIsMuted(false); + isMutedRef.current = false; - // Riproduci i chunk uno dopo l'altro (sequenzialmente) - // Questo evita problemi di concatenazione MP3 - audioPlayerRef.current.playChunksSequentially(() => { - console.log('🔊 Riproduzione completata, riavvio registrazione...'); - setState('connected'); - // Riavvia automaticamente la registrazione per la prossima domanda + // Riavvia registrazione dopo un breve delay setTimeout(() => { - startRecording(); - }, 500); - }); + if (audioRecorderRef.current && websocketRef.current?.isReady()) { + startRecording(); + } + }, 300); + } } else { - console.log('⚠️ Nessun chunk audio ricevuto, riavvio registrazione...'); - setState('connected'); - // Riavvia automaticamente la registrazione anche se non ci sono chunk - setTimeout(() => { - startRecording(); - }, 500); + console.log(`[useVoiceChat] Audio in 
corso (chunks: ${audioPlayerRef.current?.getChunksCount() || 0}, playing: ${isPlaying}), mantengo mute fino a fine riproduzione`); } break; - } - }, - - onAudioChunk: (audioData: string, chunkIndex?: number) => { - const currentTime = Date.now(); - - // Traccia timing inter-arrival - if (lastChunkTimeRef.current > 0) { - const interArrivalMs = currentTime - lastChunkTimeRef.current; - chunkTimingsRef.current.push(interArrivalMs); + case 'audio_end': + // Server ha finito di inviare chunk audio per questo segmento + if (audioPlayerRef.current && audioPlayerRef.current.getChunksCount() > 0) { + setState('speaking'); + console.log(`[useVoiceChat] Avvio riproduzione audio (${audioPlayerRef.current.getChunksCount()} chunk) - mantengo auto-mute`); + audioPlayerRef.current.playPcm16Chunks(() => { + console.log('[useVoiceChat] Riproduzione completata'); + // Riattiva il microfono SOLO se l'agent ha finito completamente + if (agentEndedRef.current) { + console.log('[useVoiceChat] Agent finito, auto-unmute e riavvio registrazione'); + setState('ready'); + + // Auto-unmute: riattiva microfono (solo se non mutato manualmente) + if (!isManuallyMutedRef.current) { + setIsMuted(false); + isMutedRef.current = false; + + // Riavvia registrazione dopo un breve delay + setTimeout(() => { + if (audioRecorderRef.current && websocketRef.current?.isReady()) { + startRecording(); + } + }, 300); + } + } else { + // Agent non ha ancora finito, torna in processing + // e aspetta altri chunk audio o agent_end + console.log('[useVoiceChat] Agent non ancora finito, attendo...'); + setState('processing'); + } + }); + } else if (agentEndedRef.current) { + // Nessun audio da riprodurre e agent finito, torna pronto + console.log('[useVoiceChat] Nessun audio da riprodurre, auto-unmute'); + setState('ready'); - if (interArrivalMs < 10) { - console.warn(`🔊 ⚡ Chunk burst: ${interArrivalMs}ms tra chunk #${(lastChunkIndexRef.current ?? 
-1) + 1} e #${chunkIndex}`); - } - } - lastChunkTimeRef.current = currentTime; + // Auto-unmute: riattiva microfono (solo se non mutato manualmente) + if (!isManuallyMutedRef.current) { + setIsMuted(false); + isMutedRef.current = false; - console.log(`🔊 Ricevuto chunk audio ${typeof chunkIndex === 'number' ? `#${chunkIndex}` : '(senza indice)'}`); + // Riavvia registrazione dopo un breve delay + setTimeout(() => { + if (audioRecorderRef.current && websocketRef.current?.isReady()) { + startRecording(); + } + }, 300); + } + } + break; - if (!audioPlayerRef.current) { - console.error('🔊 AudioPlayer non inizializzato'); - return; - } + case 'interrupted': + // Risposta interrotta dall'utente, torna pronto + console.log('[useVoiceChat] Risposta interrotta, auto-unmute'); + agentEndedRef.current = true; // Reset + if (audioPlayerRef.current) { + audioPlayerRef.current.stopPlayback(); + audioPlayerRef.current.clearChunks(); + } + setState('ready'); - if (typeof chunkIndex === 'number') { - const previousIndex = lastChunkIndexRef.current; + // Auto-unmute: riattiva microfono (solo se non mutato manualmente) + if (!isManuallyMutedRef.current) { + setIsMuted(false); + isMutedRef.current = false; - if (previousIndex !== null) { - if (chunkIndex === previousIndex) { - console.warn(`🔊 Chunk duplicato #${chunkIndex} ricevuto dal server`); - } else if (chunkIndex < previousIndex) { - console.warn(`🔊 Chunk fuori ordine: #${chunkIndex} ricevuto dopo #${previousIndex}`); - } else if (chunkIndex > previousIndex + 1) { - console.warn(`🔊 Mancano ${chunkIndex - previousIndex - 1} chunk prima di #${chunkIndex}`); + // Riavvia registrazione dopo un breve delay + setTimeout(() => { + if (audioRecorderRef.current && websocketRef.current?.isReady()) { + startRecording(); + } + }, 200); } - } else if (chunkIndex > 0) { - console.warn(`🔊 Primo chunk ricevuto con indice ${chunkIndex} (atteso 0)`); - } + break; } + }, - const stored = audioPlayerRef.current.addChunk(audioData, chunkIndex); - - 
if (stored) { - if (typeof chunkIndex === 'number') { - lastChunkIndexRef.current = lastChunkIndexRef.current === null - ? chunkIndex - : Math.max(lastChunkIndexRef.current, chunkIndex); - } + onAudioChunk: (audioData: string, chunkIndex: number) => { + if (audioPlayerRef.current) { + audioPlayerRef.current.addChunk(audioData, chunkIndex); setChunksReceived(prev => prev + 1); } }, + onTranscript: (role: 'user' | 'assistant', content: string) => { + setTranscripts(prev => [...prev, { role, content }]); + }, + + onToolCall: (toolName: string, args: string) => { + setActiveTools(prev => [...prev, { name: toolName, args, status: 'running' }]); + }, + + onToolOutput: (toolName: string, output: string) => { + setActiveTools(prev => prev.map(t => + t.name === toolName && t.status === 'running' + ? { ...t, status: 'complete' as const, output } + : t + )); + }, + + onDone: () => { + console.log('Sessione vocale terminata dal server'); + setState('disconnected'); + }, + onError: (errorMessage: string) => { - console.error('🎤 Errore WebSocket:', errorMessage); + console.error('Errore WebSocket:', errorMessage); setError(errorMessage); setState('error'); } @@ -245,21 +359,14 @@ export function useVoiceChat() { */ const initialize = useCallback(async (): Promise => { try { - debugAudioDependencies(); - const permissionsGranted = await requestPermissions(); - if (!permissionsGranted) { - return false; - } + if (!permissionsGranted) return false; audioRecorderRef.current = new AudioRecorder(); audioPlayerRef.current = new AudioPlayer(); - websocketRef.current = new VoiceBotWebSocket(websocketCallbacks); - console.log('🎤 Componenti audio inizializzati'); return true; - } catch (err) { console.error('Errore inizializzazione:', err); setError('Errore durante l\'inizializzazione'); @@ -269,7 +376,7 @@ export function useVoiceChat() { }, [requestPermissions]); /** - * Connette al servizio vocale e avvia automaticamente la registrazione + * Connette al servizio vocale */ const connect 
= useCallback(async (): Promise => { if (!websocketRef.current) { @@ -279,55 +386,25 @@ export function useVoiceChat() { setState('connecting'); setError(null); + setTranscripts([]); + setActiveTools([]); + setChunksReceived(0); + shouldAutoStartRecordingRef.current = true; + agentEndedRef.current = true; // Reset per nuova sessione + isManuallyMutedRef.current = false; // Reset mute manuale try { - console.log('🔌 CONNECT: Connessione WebSocket in corso...'); const connected = await websocketRef.current!.connect(); - if (!connected) { - console.error('❌ CONNECT: Connessione fallita'); setError('Impossibile connettersi al servizio vocale'); setState('error'); - return false; - } - - console.log('✅ CONNECT: WebSocket connesso, attesa autenticazione...'); - - // Imposta il flag per avviare automaticamente la registrazione dopo l'autenticazione - shouldAutoStartRecordingRef.current = true; - - // Aspetta che il WebSocket sia autenticato (non solo connesso) - let retries = 0; - const maxRetries = 30; // 3 secondi max per autenticazione - - while (!websocketRef.current.isAuthenticated() && retries < maxRetries) { - console.log(`⏳ CONNECT: Attesa autenticazione... (${retries + 1}/${maxRetries})`); - await new Promise(resolve => setTimeout(resolve, 100)); - retries++; - - // Se il WebSocket si è disconnesso durante l'attesa, esci - if (!websocketRef.current.isConnected()) { - console.error('❌ CONNECT: WebSocket disconnesso durante autenticazione'); - setError('WebSocket disconnesso'); - setState('error'); - shouldAutoStartRecordingRef.current = false; - return false; - } - } - - if (!websocketRef.current.isAuthenticated()) { - console.error('❌ CONNECT: Timeout autenticazione WebSocket'); - setError('Timeout autenticazione'); - setState('error'); shouldAutoStartRecordingRef.current = false; return false; } - - console.log('✅ CONNECT: Autenticazione completata! 
Registrazione verrà avviata automaticamente...'); + // Le transizioni di stato avvengono via callback: + // connecting -> authenticating -> setting_up -> ready return true; - } catch (err) { - console.error('❌ CONNECT: Errore connessione:', err); setError('Errore di connessione'); setState('error'); shouldAutoStartRecordingRef.current = false; @@ -336,110 +413,67 @@ export function useVoiceChat() { }, [initialize]); /** - * VAD Callbacks - */ - const vadCallbacks: VADCallbacks = { - onSpeechStart: () => { - console.log('🎙️ HOOK: ✅ Inizio voce rilevato - UI aggiornata'); - setIsSpeechActive(true); - }, - onSpeechEnd: () => { - console.log('🎙️ HOOK: ⏹️ Fine voce rilevata - UI aggiornata'); - setIsSpeechActive(false); - }, - onSilenceDetected: () => { - console.log('🎙️ HOOK: 🔇 Silenzio rilevato - Timer avviato'); - }, - onAutoStop: async () => { - console.log('🎙️ HOOK: 🛑 Auto-stop chiamato - Fermando registrazione...'); - await stopRecording(); - }, - onMeteringUpdate: (level: number) => { - setAudioLevel(level); - // Log dettagliato del livello solo ogni secondo (invece di ogni 100ms) - if (Date.now() % 1000 < 150) { - console.log(`🎚️ HOOK: Audio level aggiornato → ${level.toFixed(1)} dB`); - } - }, - }; - - /** - * Toggle VAD mode - */ - const toggleVAD = useCallback(() => { - setVadEnabled(prev => !prev); - }, []); - - /** - * Avvia la registrazione audio + * Avvia la registrazione audio con streaming chunks via WebSocket. + * Ogni frame PCM16 a 24kHz viene inviato in tempo reale come binary frame. + * + * IMPORTANTE: Il microfono invia audio continuamente. OpenAI gestisce + * automaticamente VAD e interruzioni. Non serve commit o interrupt manuale. 
*/ const startRecording = useCallback(async (): Promise => { - console.log('🎬 START RECORDING: Chiamata startRecording()'); - if (!audioRecorderRef.current || !websocketRef.current) { - console.error('❌ START RECORDING: Servizio non inizializzato'); - console.log(' - audioRecorderRef:', !!audioRecorderRef.current); - console.log(' - websocketRef:', !!websocketRef.current); setError('Servizio non inizializzato'); return false; } - if (!websocketRef.current.isConnected()) { - console.error('❌ START RECORDING: WebSocket non connesso'); - setError('WebSocket non connesso'); + if (!websocketRef.current.isReady()) { + setError('Sessione vocale non pronta'); return false; } - if (!websocketRef.current.isAuthenticated()) { - console.error('❌ START RECORDING: WebSocket non autenticato'); - setError('WebSocket non autenticato'); - return false; - } - - console.log('✅ START RECORDING: Pre-check OK (connesso e autenticato), avvio registrazione...'); - try { - const started = await audioRecorderRef.current.startRecording(vadEnabled, vadCallbacks); - console.log('📝 START RECORDING: Risultato startRecording():', started); + // Callback invocato per ogni chunk audio PCM16 a 24kHz + // Converte base64 in ArrayBuffer e lo invia come binary frame + const onChunk = (base64Chunk: string) => { + try { + const arrayBuffer = base64ToArrayBuffer(base64Chunk); + websocketRef.current?.sendAudio(arrayBuffer); + } catch (error) { + console.error('Errore conversione chunk audio:', error); + } + }; + const started = await audioRecorderRef.current.startRecording(onChunk); if (!started) { - console.error('❌ START RECORDING: Impossibile avviare la registrazione'); setError('Impossibile avviare la registrazione'); return false; } - console.log('✅ START RECORDING: Registrazione avviata con successo!'); setState('recording'); setError(null); - setIsSpeechActive(false); - setAudioLevel(-160); // Aggiorna la durata della registrazione ogni 100ms recordingIntervalRef.current = setInterval(() => { if 
(audioRecorderRef.current) { - const duration = audioRecorderRef.current.getRecordingDuration(); - setRecordingDuration(duration); + setRecordingDuration(audioRecorderRef.current.getRecordingDuration()); } }, 100); - console.log('🎤 Registrazione avviata', vadEnabled ? '(VAD attivo)' : '(VAD disattivo)'); return true; - } catch (err) { - console.error('❌ START RECORDING: Errore avvio registrazione:', err); + console.error('Errore avvio registrazione:', err); setError('Errore durante la registrazione'); setState('error'); return false; } - }, [vadEnabled]); + }, []); /** - * Ferma la registrazione e invia l'audio al server + * Ferma la registrazione. + * I chunks sono già stati inviati in streaming durante la registrazione. + * Il VAD di OpenAI rileva automaticamente la fine della frase, non serve commit manuale. */ const stopRecording = useCallback(async (): Promise => { - if (!audioRecorderRef.current || !websocketRef.current) { - return false; - } + if (!audioRecorderRef.current || !websocketRef.current) return false; // Ferma il timer della durata if (recordingIntervalRef.current) { @@ -448,26 +482,17 @@ export function useVoiceChat() { } try { - const audioData = await audioRecorderRef.current.stopRecording(); - if (!audioData) { - setError('Nessun dato audio registrato'); - setState('error'); - return false; - } + await audioRecorderRef.current.stopRecording(); + + console.log('Registrazione fermata (chunks già inviati in streaming, VAD automatico attivo)'); - console.log('🎤 Invio audio al server...'); - - // Invia l'audio al server tramite WebSocket - websocketRef.current.sendAudioChunk(audioData, true); - setState('processing'); setRecordingDuration(0); - return true; - + } catch (err) { console.error('Errore stop registrazione:', err); - setError('Errore durante l\'invio dell\'audio'); + setError('Errore durante l\'arresto della registrazione'); setState('error'); return false; } @@ -487,8 +512,7 @@ export function useVoiceChat() { } setRecordingDuration(0); 
- setState('connected'); - console.log('🎤 Registrazione cancellata'); + setState('ready'); }, []); /** @@ -497,67 +521,164 @@ export function useVoiceChat() { const stopPlayback = useCallback(async (): Promise => { if (audioPlayerRef.current) { await audioPlayerRef.current.stopPlayback(); + audioPlayerRef.current.clearChunks(); } - - setState('connected'); - console.log('🔊 Riproduzione fermata'); + + setState('ready'); }, []); /** - * Invia comando di controllo al server + * Invia un messaggio di testo all'assistente */ - const sendControl = useCallback((action: 'pause' | 'resume' | 'cancel'): void => { - if (websocketRef.current && websocketRef.current.isConnected()) { - websocketRef.current.sendControl(action); - console.log(`🎤 Comando inviato: ${action}`); + const sendTextMessage = useCallback((content: string): void => { + if (websocketRef.current?.isReady()) { + websocketRef.current.sendText(content); + setState('processing'); } }, []); + /** + * Muta il microfono (azione manuale dell'utente) + */ + const mute = useCallback(async (): Promise => { + console.log('[useVoiceChat] Mute manuale attivato'); + setIsMuted(true); + isMutedRef.current = true; + isManuallyMutedRef.current = true; // Marca come mute manuale + + // Ferma la registrazione se è attiva + if (audioRecorderRef.current?.isCurrentlyRecording()) { + try { + await audioRecorderRef.current.cancelRecording(); + + // Pulisci il timer della durata + if (recordingIntervalRef.current) { + clearInterval(recordingIntervalRef.current); + recordingIntervalRef.current = null; + } + + setRecordingDuration(0); + // Mantieni lo stato 'ready' invece di tornare a 'recording' + if (state === 'recording') { + setState('ready'); + } + } catch (err) { + console.error('Errore durante il mute:', err); + } + } + }, [state]); + + /** + * Riattiva il microfono (azione manuale dell'utente) + */ + const unmute = useCallback(async (): Promise => { + console.log('[useVoiceChat] Unmute manuale attivato'); + setIsMuted(false); + 
isMutedRef.current = false; + isManuallyMutedRef.current = false; // Rimuove il flag di mute manuale + + // Riavvia la registrazione se siamo in stato 'ready' + if (state === 'ready' && websocketRef.current?.isReady()) { + setTimeout(() => { + startRecording(); + }, 100); + } + }, [state, startRecording]); + /** * Disconnette dal servizio */ - const disconnect = useCallback((): void => { + const disconnect = useCallback(async (): Promise => { + console.log('Disconnessione in corso...'); + + // Prima ferma la registrazione per evitare invio audio su connessione che sta chiudendo + if (audioRecorderRef.current?.isCurrentlyRecording()) { + try { + await audioRecorderRef.current.cancelRecording(); + } catch (err) { + console.error('Errore fermando registrazione durante disconnect:', err); + } + } + + // Pulisci il timer della durata + if (recordingIntervalRef.current) { + clearInterval(recordingIntervalRef.current); + recordingIntervalRef.current = null; + } + + // Ferma l'audio player + if (audioPlayerRef.current?.isCurrentlyPlaying()) { + try { + await audioPlayerRef.current.stopPlayback(); + } catch (err) { + console.error('Errore fermando playback durante disconnect:', err); + } + } + + // Poi chiudi il WebSocket if (websocketRef.current) { websocketRef.current.disconnect(); } - + setState('idle'); setServerStatus(null); setError(null); - console.log('🎤 Disconnesso dal servizio'); + setTranscripts([]); + setActiveTools([]); + setRecordingDuration(0); + setIsMuted(false); // Reset mute state + isMutedRef.current = false; + isManuallyMutedRef.current = false; // Reset mute manuale }, []); /** * Pulisce tutte le risorse */ const cleanup = useCallback(async (): Promise => { - // Ferma registrazione se attiva + console.log('Cleanup risorse voice chat...'); + + // Pulisci il timer della durata + if (recordingIntervalRef.current) { + clearInterval(recordingIntervalRef.current); + recordingIntervalRef.current = null; + } + + // Prima ferma la registrazione if 
(audioRecorderRef.current) { - await audioRecorderRef.current.cancelRecording(); + try { + await audioRecorderRef.current.cancelRecording(); + } catch (err) { + console.error('Errore cleanup registrazione:', err); + } + audioRecorderRef.current = null; } - // Ferma riproduzione se attiva + // Poi ferma il player if (audioPlayerRef.current) { - await audioPlayerRef.current.destroy(); + try { + await audioPlayerRef.current.destroy(); + } catch (err) { + console.error('Errore cleanup player:', err); + } + audioPlayerRef.current = null; } - // Disconnetti WebSocket + // Infine chiudi il WebSocket if (websocketRef.current) { - websocketRef.current.destroy(); - } - - // Pulisci timer - if (recordingIntervalRef.current) { - clearInterval(recordingIntervalRef.current); + try { + websocketRef.current.destroy(); + } catch (err) { + console.error('Errore cleanup websocket:', err); + } + websocketRef.current = null; } - // Reset stati setState('idle'); setError(null); setServerStatus(null); setRecordingDuration(0); - - console.log('🎤 Risorse pulite'); + setTranscripts([]); + setActiveTools([]); }, []); // Cleanup automatico quando il componente viene smontato @@ -567,12 +688,12 @@ export function useVoiceChat() { }; }, [cleanup]); - // Stati derivati per convenience - const isConnected = state === 'connected' || state === 'recording' || state === 'processing' || state === 'speaking'; + // Stati derivati + const isConnected = ['ready', 'recording', 'processing', 'speaking'].includes(state); const isRecording = state === 'recording'; const isProcessing = state === 'processing'; const isSpeaking = state === 'speaking'; - const canRecord = state === 'connected' && hasPermissions; + const canRecord = state === 'ready' && hasPermissions; const canStop = state === 'recording'; return { @@ -583,6 +704,11 @@ export function useVoiceChat() { recordingDuration, hasPermissions, chunksReceived, + isMuted, + + // Trascrizioni e tool + transcripts, + activeTools, // Stati derivati 
isConnected, @@ -592,11 +718,6 @@ export function useVoiceChat() { canRecord, canStop, - // VAD stati - vadEnabled, - audioLevel, - isSpeechActive, - // Azioni initialize, connect, @@ -605,9 +726,10 @@ export function useVoiceChat() { stopRecording, cancelRecording, stopPlayback, - sendControl, + sendTextMessage, cleanup, requestPermissions, - toggleVAD, + mute, + unmute, }; } diff --git a/src/locales/en.json b/src/locales/en.json index b22e19c..bea7817 100644 --- a/src/locales/en.json +++ b/src/locales/en.json @@ -5,6 +5,7 @@ "categories": "Categories", "notes": "Notes", "calendar": "Calendar", + "calendar20": "Calendar 2.0", "statistics": "Statistics" }, "screens": { @@ -434,6 +435,34 @@ "sunday": "Sun" } }, + "calendar20": { + "title": "Calendar 2.0", + "views": { + "month": "Month", + "week": "Week", + "threeDay": "3 Days", + "day": "Day", + "agenda": "Agenda" + }, + "today": "Today", + "allDay": "All day", + "noEvents": "No events", + "more": "+{{count}} more", + "search": { + "placeholder": "Search tasks...", + "noResults": "No tasks found" + }, + "fab": { + "newTask": "New Task", + "newEvent": "New Event" + }, + "drawer": { + "views": "Views", + "categories": "Categories", + "showAll": "Show all", + "settings": "Settings" + } + }, "statistics": { "title": "Statistics", "overview": "Overview", diff --git a/src/locales/it.json b/src/locales/it.json index 3f9e87c..013ce36 100644 --- a/src/locales/it.json +++ b/src/locales/it.json @@ -5,6 +5,7 @@ "categories": "Categorie", "notes": "Note", "calendar": "Calendario", + "calendar20": "Calendario 2.0", "statistics": "Statistiche" }, "screens": { @@ -434,6 +435,33 @@ "sunday": "Dom" } }, + "calendar20": { + "title": "Calendario 2.0", + "views": { + "month": "Mese", + "week": "Settimana", + "threeDay": "3 Giorni", + "day": "Giorno", + "agenda": "Agenda" + }, + "today": "Oggi", + "allDay": "Tutto il giorno", + "noEvents": "Nessun evento", + "more": "+{{count}} altri", + "search": { + "placeholder": "Cerca task...", + 
"noResults": "Nessun task trovato" + }, + "fab": { + "newTask": "Nuovo Task", + "newEvent": "Nuovo Evento" + }, + "drawer": { + "views": "Viste", + "categories": "Categorie", + "showAll": "Mostra tutto" + } + }, "statistics": { "title": "Statistiche", "overview": "Panoramica", diff --git a/src/navigation/screens/BotChat.tsx b/src/navigation/screens/BotChat.tsx index 7d66a33..acbdf62 100644 --- a/src/navigation/screens/BotChat.tsx +++ b/src/navigation/screens/BotChat.tsx @@ -1,6 +1,6 @@ import React, { useState, useCallback, useEffect } from 'react'; import { View, KeyboardAvoidingView, Platform, SafeAreaView, Alert, Keyboard, Dimensions } from 'react-native'; -import { sendMessageToBot, createNewChat, formatMessage, clearChatHistory } from '../../services/botservice'; +import { sendMessageToBot, createNewChat, formatMessage, clearChatHistory } from '../../services/textBotService'; import { getChatWithMessages, ChatMessage } from '../../services/chatHistoryService'; import { ChatHeader, diff --git a/src/navigation/screens/Calendar.tsx b/src/navigation/screens/Calendar.tsx index cb42b65..0b4ecc7 100644 --- a/src/navigation/screens/Calendar.tsx +++ b/src/navigation/screens/Calendar.tsx @@ -1,27 +1,83 @@ -import React from 'react'; +import React, { useState, useEffect } from 'react'; import { View, StyleSheet, Text, SafeAreaView, - StatusBar + StatusBar, + TouchableOpacity } from 'react-native'; +import { Ionicons } from '@expo/vector-icons'; +import AsyncStorage from '@react-native-async-storage/async-storage'; import CalendarView from '../../components/Calendar/CalendarView'; +import Calendar20View from '../../components/Calendar20/Calendar20View'; import { useTranslation } from 'react-i18next'; +const CALENDAR_VIEW_MODE_KEY = '@calendar_view_mode'; + export default function Calendar() { const { t } = useTranslation(); + const [viewMode, setViewMode] = useState<'minimal' | 'advanced'>('minimal'); + const [isLoading, setIsLoading] = useState(true); + + // Carica la 
preferenza salvata all'avvio + useEffect(() => { + loadViewMode(); + }, []); + + const loadViewMode = async () => { + try { + const savedMode = await AsyncStorage.getItem(CALENDAR_VIEW_MODE_KEY); + if (savedMode === 'advanced' || savedMode === 'minimal') { + setViewMode(savedMode); + } + } catch (error) { + console.error('Error loading calendar view mode:', error); + } finally { + setIsLoading(false); + } + }; + + const toggleViewMode = async () => { + const newMode = viewMode === 'minimal' ? 'advanced' : 'minimal'; + setViewMode(newMode); + try { + await AsyncStorage.setItem(CALENDAR_VIEW_MODE_KEY, newMode); + } catch (error) { + console.error('Error saving calendar view mode:', error); + } + }; + + if (isLoading) { + return ( + + + + ); + } + return ( - {/* Header con titolo principale - stesso stile di Home20 e Categories */} + {/* Header con titolo principale e toggle button */} {t('calendar.title')} + + + - + {viewMode === 'minimal' ? : } ); @@ -38,6 +94,7 @@ const styles = StyleSheet.create({ paddingBottom: 0, flexDirection: "row", alignItems: "flex-start", + justifyContent: "space-between", }, mainTitle: { paddingTop: 10, @@ -48,6 +105,13 @@ const styles = StyleSheet.create({ fontFamily: "System", letterSpacing: -1.5, marginBottom: 0, + flex: 1, + }, + toggleButton: { + paddingTop: 15, + paddingLeft: 15, + paddingRight: 5, + paddingBottom: 10, }, content: { flex: 1, diff --git a/src/navigation/screens/Calendar20.tsx b/src/navigation/screens/Calendar20.tsx new file mode 100644 index 0000000..5bdc4b1 --- /dev/null +++ b/src/navigation/screens/Calendar20.tsx @@ -0,0 +1,19 @@ +import React from 'react'; +import { SafeAreaView, StatusBar, StyleSheet } from 'react-native'; +import Calendar20View from '../../components/Calendar20/Calendar20View'; + +export default function Calendar20() { + return ( + + + + + ); +} + +const styles = StyleSheet.create({ + container: { + flex: 1, + backgroundColor: '#ffffff', + }, +}); diff --git a/src/navigation/screens/Home.tsx 
b/src/navigation/screens/Home.tsx index cb7e415..becf11c 100644 --- a/src/navigation/screens/Home.tsx +++ b/src/navigation/screens/Home.tsx @@ -20,7 +20,7 @@ import { Ionicons } from "@expo/vector-icons"; import AsyncStorage from "@react-native-async-storage/async-storage"; import { ChatList, Message } from "../../components/BotChat"; import { ToolWidget } from "../../components/BotChat/types"; -import { sendMessageToBot, formatMessage, StreamingCallback, createNewChat } from "../../services/botservice"; +import { sendMessageToBot, formatMessage, StreamingCallback, createNewChat } from "../../services/textBotService"; import { getChatWithMessages, ChatMessage } from "../../services/chatHistoryService"; import { STORAGE_KEYS } from "../../constants/authConstants"; import { TaskCacheService } from '../../services/TaskCacheService'; @@ -195,6 +195,8 @@ const HomeScreen = () => { duration: 200, useNativeDriver: false, }).start(); + + console.log('[HOME] Mic button animation state:', { isInputFocused, targetValue: isInputFocused ? 
0 : 1 }); }, [isInputFocused, micButtonAnim]); // Effetto per gestire la visualizzazione della tastiera @@ -216,6 +218,10 @@ const HomeScreen = () => { const keyboardDidHideListener = Keyboard.addListener( "keyboardDidHide", () => { + // Forza il reset del focus quando la tastiera si nasconde + console.log('[HOME] Keyboard hidden, resetting input focus state'); + setIsInputFocused(false); + if (chatStarted) { // Riporta l'input in posizione normale Animated.timing(inputBottomPosition, { @@ -811,8 +817,14 @@ const HomeScreen = () => { returnKeyType="send" blurOnSubmit={true} editable={!isLoading} - onFocus={() => setIsInputFocused(true)} - onBlur={() => setIsInputFocused(false)} + onFocus={() => { + console.log('[HOME] TextInput focused (under greeting)'); + setIsInputFocused(true); + }} + onBlur={() => { + console.log('[HOME] TextInput blurred (under greeting)'); + setIsInputFocused(false); + }} /> { returnKeyType="send" blurOnSubmit={true} editable={!isLoading} - onFocus={() => setIsInputFocused(true)} - onBlur={() => setIsInputFocused(false)} + onFocus={() => { + console.log('[HOME] TextInput focused (chat started)'); + setIsInputFocused(true); + }} + onBlur={() => { + console.log('[HOME] TextInput blurred (chat started)'); + setIsInputFocused(false); + }} /> { + const cache: TasksCache = { + tasks, + categories, + lastSync: Date.now(), + version: this.currentCacheVersion + }; + + await AsyncStorage.setItem(CACHE_KEYS.TASKS_CACHE, JSON.stringify(cache)); + await AsyncStorage.setItem(CACHE_KEYS.LAST_SYNC_TIMESTAMP, cache.lastSync.toString()); + } + + // Metodo interno per leggere i task dalla cache senza deduplicazione ricorsiva + private async _getCachedTasksRaw(): Promise<{ tasks: Task[], categories: Category[] }> { + try { + const cachedData = await AsyncStorage.getItem(CACHE_KEYS.TASKS_CACHE); + + if (!cachedData) { + return { tasks: [], categories: [] }; + } + + const cache: TasksCache = JSON.parse(cachedData); + + // Verifica la versione della cache + if 
(cache.version !== this.currentCacheVersion) { + return { tasks: [], categories: [] }; + } + + return { tasks: cache.tasks || [], categories: cache.categories || [] }; + } catch (error) { + console.error('[CACHE] Errore nel caricamento raw dalla cache:', error); + return { tasks: [], categories: [] }; + } + } + // Carica i task dalla cache AsyncStorage async getCachedTasks(): Promise { try { @@ -84,9 +121,9 @@ class TaskCacheService { if (deduplicatedTasks.length < cache.tasks.length) { console.log(`[CACHE] 🧹 Rimossi ${cache.tasks.length - deduplicatedTasks.length} duplicati dalla cache`); - // Salva immediatamente la cache pulita - const categories = await this.getCachedCategories(); - await this.saveTasks(deduplicatedTasks, categories); + // Salva immediatamente la cache pulita usando il metodo diretto (evita loop ricorsivo) + const categories = cache.categories || []; + await this._saveTasksDirect(deduplicatedTasks, categories); } // Log dettagliato di ogni task in cache per debug @@ -120,11 +157,18 @@ class TaskCacheService { // Salva i task nella cache async saveTasks(tasks: Task[], categories: Category[] = []): Promise { + // Previeni loop ricorsivi + if (this.isSaving) { + console.log('[CACHE] ⚠️ saveTasks già in esecuzione, skip per prevenire loop'); + return; + } + try { + this.isSaving = true; console.log(`[CACHE] Salvando ${tasks.length} task in cache...`); - // Carica i task attuali dalla cache per confronto - const currentTasks = await this.getCachedTasks(); + // Carica i task attuali dalla cache per confronto usando il metodo raw (evita loop) + const { tasks: currentTasks } = await this._getCachedTasksRaw(); // Identifica task rimossi (presenti in cache ma non nei nuovi dati) const newTaskIds = new Set(tasks.map(task => task.task_id || task.id)); @@ -167,6 +211,8 @@ class TaskCacheService { } } catch (error) { console.error('[CACHE] Errore nel salvataggio in cache:', error); + } finally { + this.isSaving = false; } } diff --git 
a/src/services/taskService.ts b/src/services/taskService.ts index 0983e11..6665fbb 100644 --- a/src/services/taskService.ts +++ b/src/services/taskService.ts @@ -1,7 +1,6 @@ import axios from "./axiosInterceptor"; import AsyncStorage from "@react-native-async-storage/async-storage"; import { STORAGE_KEYS } from "../constants/authConstants"; -// eslint-disable-next-line import/no-named-as-default import TaskCacheService from './TaskCacheService'; import SyncManager from './SyncManager'; import { emitTaskAdded, emitTaskUpdated, emitTaskDeleted, emitTasksSynced } from '../utils/eventEmitter'; diff --git a/src/services/botservice.ts b/src/services/textBotService.ts similarity index 50% rename from src/services/botservice.ts rename to src/services/textBotService.ts index 10947e8..07e417a 100644 --- a/src/services/botservice.ts +++ b/src/services/textBotService.ts @@ -34,7 +34,7 @@ export async function sendMessageToBot( if (!token) { return "Mi dispiace, sembra che tu non sia autenticato. Effettua il login per continuare."; } - + // Costruisci il payload per la richiesta const requestPayload: any = { quest: userMessage, @@ -103,7 +103,7 @@ export async function sendMessageToBot( if (parsed.type === 'chat_info') { receivedChatId = parsed.chat_id; isNewChat = parsed.is_new; - console.log(`[BOTSERVICE] Chat info ricevuto: chat_id=${receivedChatId}, is_new=${isNewChat}`); + console.log(`[TEXTBOTSERVICE] Chat info ricevuto: chat_id=${receivedChatId}, is_new=${isNewChat}`); // Notifica UI del chat_id ricevuto if (onStreamChunk) { @@ -190,7 +190,7 @@ export async function sendMessageToBot( } catch (e: any) { widget.status = 'error'; widget.errorMessage = 'Errore parsing output tool'; - console.error('[BOTSERVICE] Error parsing tool output:', e); + console.error('[TEXTBOTSERVICE] Error parsing tool output:', e); } // IMPORTANTE: Aggiorna il widget nella posizione ORIGINALE, non creare un duplicato @@ -201,7 +201,7 @@ export async function sendMessageToBot( onStreamChunk('', 
false, Array.from(toolWidgetsMap.values())); } } else { - console.warn('[BOTSERVICE] Widget not found for index:', parsed.item_index); + console.warn('[TEXTBOTSERVICE] Widget not found for index:', parsed.item_index); } } @@ -263,7 +263,7 @@ export async function sendMessageToBot( chat_id: receivedChatId, is_new: isNewChat, }; - + } catch (error: any) { console.error("❌ Errore nella comunicazione con il bot:", error); @@ -315,7 +315,7 @@ export async function clearChatHistory(): Promise { console.log("✅ Cronologia chat eliminata dal server"); return true; - + } catch (error: any) { console.error("❌ Errore nell'eliminazione della cronologia chat:", error); return false; @@ -352,14 +352,14 @@ export function validateMessage(message: string): boolean { if (!message || typeof message !== 'string') { return false; } - + const trimmedMessage = message.trim(); - + // Controllo lunghezza minima e massima if (trimmedMessage.length === 0 || trimmedMessage.length > 5000) { return false; } - + return true; } @@ -372,52 +372,52 @@ export function formatMessage(message: string): string { if (!message || typeof message !== 'string') { return ""; } - + let formattedMessage = message.trim(); - + // Converte alcuni pattern comuni in Markdown // Titoli con emoji task formattedMessage = formattedMessage.replace( - /📅 TASK PER LA DATA (.+?):/g, + /📅 TASK PER LA DATA (.+?):/g, '## 📅 Task per la data $1\n\n' ); - + // Totale task trovati formattedMessage = formattedMessage.replace( /📊 Totale task trovati: (\d+)/g, '\n---\n**📊 Totale task trovati:** `$1`' ); - + // Pattern per evidenziare i numeri di task formattedMessage = formattedMessage.replace( /(\d+) task/g, '**$1** task' ); - + // Pattern per evidenziare le date formattedMessage = formattedMessage.replace( /(\d{4}-\d{2}-\d{2})/g, '`$1`' ); - + // Pattern per evidenziare gli orari formattedMessage = formattedMessage.replace( /(\d{2}:\d{2})/g, '`$1`' ); - + // Converti status in badge formattedMessage = formattedMessage.replace( 
/"status":\s*"([^"]+)"/g, '"status": **$1**' ); - + // Converti category_name in evidenziato formattedMessage = formattedMessage.replace( /"category_name":\s*"([^"]+)"/g, '"category_name": *$1*' ); - + return formattedMessage; } @@ -430,7 +430,7 @@ export function isStructuredResponse(response: string): boolean { if (!response || typeof response !== 'string') { return false; } - + try { const parsed = JSON.parse(response); return parsed && typeof parsed === 'object' && parsed.mode === 'view'; @@ -451,516 +451,3 @@ export function extractStructuredData(response: string): any { return null; } } - -// ============= VOICE CHAT WEBSOCKET ============= - -/** - * Tipi per i messaggi WebSocket della chat vocale - */ -export interface VoiceWebSocketMessage { - type: 'auth' | 'audio_chunk' | 'control'; - token?: string; - data?: string; // base64 audio data - is_final?: boolean; - action?: 'pause' | 'resume' | 'cancel'; -} - -export interface VoiceWebSocketResponse { - type: 'status' | 'audio_chunk' | 'error'; - phase?: 'authenticated' | 'receiving_audio' | 'transcription' | 'transcription_complete' | 'ai_processing' | 'ai_complete' | 'tts_generation' | 'tts_complete' | 'audio_streaming' | 'complete'; - message?: string; - data?: string; // base64 audio data - chunk_index?: number; -} - -/** - * Stati di autenticazione WebSocket - */ -export enum WebSocketAuthState { - DISCONNECTED = 'disconnected', - CONNECTING = 'connecting', - AUTHENTICATING = 'authenticating', - AUTHENTICATED = 'authenticated', - FAILED = 'failed' -} - -/** - * Interfaccia per messaggi in coda prima dell'autenticazione - */ -interface QueuedMessage { - type: 'audio_chunk' | 'control'; - data?: string; - is_final?: boolean; - action?: 'pause' | 'resume' | 'cancel'; -} - -/** - * Callback per gestire i diversi tipi di risposta dal WebSocket vocale - */ -export interface VoiceChatCallbacks { - onStatus?: (phase: string, message: string) => void; - onAudioChunk?: (audioData: string, chunkIndex?: number) => 
void; - onError?: (error: string) => void; - onConnectionOpen?: () => void; - onConnectionClose?: () => void; - onAuthenticationSuccess?: (message: string) => void; - onAuthenticationFailed?: (error: string) => void; -} - -const MAX_AUDIO_CHUNK_BYTES = 2_500_000; // ~2.5MB di audio PCM/mp3 - -/** - * Classe per gestire la connessione WebSocket per la chat vocale - */ -export class VoiceBotWebSocket { - private ws: WebSocket | null = null; - private callbacks: VoiceChatCallbacks; - private baseUrl: string = 'wss://taskly-production.up.railway.app'; - private reconnectAttempts: number = 0; - private maxReconnectAttempts: number = 3; - private reconnectDelay: number = 1000; - private authState: WebSocketAuthState = WebSocketAuthState.DISCONNECTED; - private messageQueue: QueuedMessage[] = []; - private authTimeout: NodeJS.Timeout | null = null; - private readonly AUTH_TIMEOUT_MS = 10000; // 10 secondi timeout per autenticazione - - constructor(callbacks: VoiceChatCallbacks) { - this.callbacks = callbacks; - } - - /** - * Connette al WebSocket per la chat vocale - */ - async connect(): Promise { - try { - const token = await getValidToken(); - if (!token) { - this.authState = WebSocketAuthState.FAILED; - this.callbacks.onError?.('Token di autenticazione non disponibile'); - return false; - } - - this.authState = WebSocketAuthState.CONNECTING; - const wsUrl = `${this.baseUrl}/chat/voice-bot-websocket`; - - // Crea una Promise che si risolve quando onopen viene chiamato - return new Promise((resolve, reject) => { - this.ws = new WebSocket(wsUrl); - - // Timeout per la connessione (10 secondi) - const connectionTimeout = setTimeout(() => { - this.authState = WebSocketAuthState.FAILED; - this.callbacks.onError?.('Timeout connessione WebSocket'); - reject(new Error('Timeout connessione WebSocket')); - }, 10000); - - this.ws.onopen = () => { - clearTimeout(connectionTimeout); - console.log('🎤 Connessione WebSocket vocale aperta'); - this.reconnectAttempts = 0; - - // Invia 
autenticazione e avvia timeout - this.startAuthentication(token); - this.callbacks.onConnectionOpen?.(); - resolve(true); - }; - - this.ws.onmessage = (event) => { - try { - const response: VoiceWebSocketResponse = JSON.parse(event.data); - this.handleResponse(response); - } catch (error) { - console.error('Errore parsing risposta WebSocket:', error); - this.callbacks.onError?.('Errore nel formato della risposta del server'); - } - }; - - this.ws.onerror = (error) => { - clearTimeout(connectionTimeout); - console.error('Errore WebSocket vocale:', error); - this.authState = WebSocketAuthState.FAILED; - this.clearAuthTimeout(); - this.callbacks.onError?.('Errore di connessione WebSocket'); - reject(new Error('Errore di connessione WebSocket')); - }; - - this.ws.onclose = (event) => { - clearTimeout(connectionTimeout); - console.log('🎤 Connessione WebSocket vocale chiusa:', event.code, event.reason); - this.authState = WebSocketAuthState.DISCONNECTED; - this.clearAuthTimeout(); - this.clearMessageQueue(); - this.callbacks.onConnectionClose?.(); - - // Tentativo di riconnessione automatica - if (this.reconnectAttempts < this.maxReconnectAttempts && event.code !== 1000) { - this.attemptReconnect(); - } - }; - }); - } catch (error) { - console.error('Errore connessione WebSocket vocale:', error); - this.authState = WebSocketAuthState.FAILED; - this.callbacks.onError?.('Impossibile connettersi al servizio vocale'); - return false; - } - } - - /** - * Avvia il processo di autenticazione - */ - private startAuthentication(token: string): void { - if (!this.isConnected()) return; - - this.authState = WebSocketAuthState.AUTHENTICATING; - - // Avvia timeout per autenticazione - this.authTimeout = setTimeout(() => { - this.handleAuthenticationTimeout(); - }, this.AUTH_TIMEOUT_MS); - - // Assicurati che il token abbia il prefisso "Bearer " - const formattedToken = token.startsWith('Bearer ') ? 
token : `Bearer ${token}`; - - const authMessage: VoiceWebSocketMessage = { - type: 'auth', - token: formattedToken - }; - - console.log('🔐 Invio autenticazione JWT'); - this.ws!.send(JSON.stringify(authMessage)); - } - - /** - * Gestisce il timeout dell'autenticazione - */ - private handleAuthenticationTimeout(): void { - console.error('⏰ Timeout autenticazione WebSocket'); - this.authState = WebSocketAuthState.FAILED; - this.callbacks.onAuthenticationFailed?.('Timeout autenticazione - il server non ha risposto'); - this.disconnect(); - } - - /** - * Pulisce il timeout di autenticazione - */ - private clearAuthTimeout(): void { - if (this.authTimeout) { - clearTimeout(this.authTimeout); - this.authTimeout = null; - } - } - - /** - * Gestisce le risposte ricevute dal WebSocket - */ - private handleResponse(response: VoiceWebSocketResponse): void { - // Validazione sicurezza messaggio - if (!this.validateResponse(response)) { - console.warn('Messaggio WebSocket non valido ricevuto:', response); - return; - } - - switch (response.type) { - case 'status': - this.handleStatusResponse(response); - break; - - case 'audio_chunk': - this.handleAudioChunkResponse(response); - break; - - case 'error': - this.handleErrorResponse(response); - break; - - default: - console.warn('Tipo di risposta WebSocket sconosciuto:', response.type); - } - } - - /** - * Gestisce risposta di stato (inclusa autenticazione) - */ - private handleStatusResponse(response: VoiceWebSocketResponse): void { - if (!response.phase || !response.message) return; - - switch (response.phase) { - case 'authenticated': - this.handleAuthenticationSuccess(response.message); - break; - - case 'receiving_audio': - case 'transcription': - case 'transcription_complete': - case 'ai_processing': - case 'ai_complete': - case 'tts_generation': - case 'tts_complete': - case 'audio_streaming': - case 'complete': - this.callbacks.onStatus?.(response.phase, response.message); - break; - - default: - console.warn('Fase 
WebSocket sconosciuta:', response.phase); - } - } - - /** - * Gestisce il successo dell'autenticazione - */ - private handleAuthenticationSuccess(message: string): void { - console.log('✅ Autenticazione WebSocket riuscita:', message); - this.authState = WebSocketAuthState.AUTHENTICATED; - this.clearAuthTimeout(); - - // Processa messaggi in coda - this.processQueuedMessages(); - - this.callbacks.onAuthenticationSuccess?.(message); - this.callbacks.onStatus?.('authenticated', message); - } - - /** - * Gestisce risposta audio chunk - */ - private handleAudioChunkResponse(response: VoiceWebSocketResponse): void { - // Permetti audio chunks anche durante l'autenticazione - // Il server potrebbe iniziare a inviare dati prima della conferma ufficiale - if (this.authState === WebSocketAuthState.DISCONNECTED) { - console.warn('Ricevuto audio chunk senza connessione'); - return; - } - - if (response.data) { - this.callbacks.onAudioChunk?.(response.data, response.chunk_index); - } - } - - /** - * Gestisce risposta di errore - */ - private handleErrorResponse(response: VoiceWebSocketResponse): void { - if (!response.message) return; - - // Gestione errori specifici di autenticazione - if (this.authState === WebSocketAuthState.AUTHENTICATING) { - console.error('❌ Errore autenticazione:', response.message); - this.authState = WebSocketAuthState.FAILED; - this.clearAuthTimeout(); - this.callbacks.onAuthenticationFailed?.(response.message); - } else { - this.callbacks.onError?.(response.message); - } - } - - /** - * Invia un chunk audio al server - */ - sendAudioChunk(base64AudioData: string, isFinal: boolean = false): void { - if (!this.isConnected()) { - this.callbacks.onError?.('Connessione WebSocket non disponibile'); - return; - } - - const audioMessage: QueuedMessage = { - type: 'audio_chunk', - data: base64AudioData, - is_final: isFinal - }; - - // Se non autenticato, metti in coda il messaggio - if (!this.isAuthenticated()) { - console.log('🔒 Messaggio audio messo in coda 
(non autenticato)'); - this.messageQueue.push(audioMessage); - return; - } - - this.sendMessage(audioMessage); - } - - /** - * Invia comandi di controllo (pause, resume, cancel) - */ - sendControl(action: 'pause' | 'resume' | 'cancel'): void { - if (!this.isConnected()) { - this.callbacks.onError?.('Connessione WebSocket non disponibile'); - return; - } - - const controlMessage: QueuedMessage = { - type: 'control', - action: action - }; - - // I comandi di controllo possono essere inviati anche senza autenticazione - // per permettere di cancellare operazioni in corso - if (!this.isAuthenticated() && action !== 'cancel') { - console.log('🔒 Comando di controllo messo in coda (non autenticato)'); - this.messageQueue.push(controlMessage); - return; - } - - this.sendMessage(controlMessage); - } - - /** - * Controlla se la connessione WebSocket è attiva - */ - isConnected(): boolean { - return this.ws !== null && this.ws.readyState === WebSocket.OPEN; - } - - /** - * Controlla se l'utente è autenticato - */ - isAuthenticated(): boolean { - return this.authState === WebSocketAuthState.AUTHENTICATED; - } - - /** - * Ottiene lo stato di autenticazione corrente - */ - getAuthState(): WebSocketAuthState { - return this.authState; - } - - /** - * Invia un messaggio al WebSocket - */ - private sendMessage(message: QueuedMessage): void { - if (!this.isConnected()) return; - - const wsMessage: VoiceWebSocketMessage = { - type: message.type, - data: message.data, - is_final: message.is_final, - action: message.action - }; - - this.ws!.send(JSON.stringify(wsMessage)); - } - - /** - * Processa i messaggi in coda dopo l'autenticazione - */ - private processQueuedMessages(): void { - if (this.messageQueue.length === 0) return; - - console.log(`📤 Processando ${this.messageQueue.length} messaggi in coda`); - - while (this.messageQueue.length > 0) { - const message = this.messageQueue.shift(); - if (message) { - this.sendMessage(message); - } - } - } - - /** - * Pulisce la coda dei 
messaggi - */ - private clearMessageQueue(): void { - if (this.messageQueue.length > 0) { - console.log(`🗑️ Pulisco ${this.messageQueue.length} messaggi in coda`); - this.messageQueue = []; - } - } - - /** - * Valida la sicurezza di una risposta WebSocket - */ - private validateResponse(response: VoiceWebSocketResponse): boolean { - // Controlli di sicurezza di base - if (!response || typeof response !== 'object') { - return false; - } - - // Verifica che il tipo sia valido - const validTypes = ['status', 'audio_chunk', 'error']; - if (!validTypes.includes(response.type)) { - return false; - } - - // Verifica lunghezza messaggi per prevenire DoS - if (response.message && response.message.length > 1000) { - console.warn('Messaggio troppo lungo ricevuto dal server'); - return false; - } - - // Verifica chunk audio per prevenire overflow - if (response.data) { - if (response.type === 'audio_chunk') { - const approxChunkBytes = Math.floor(response.data.length * 0.75); - if (approxChunkBytes > MAX_AUDIO_CHUNK_BYTES) { - console.warn(`Chunk audio molto grande ricevuto dal server (~${approxChunkBytes} bytes)`); - } - } else if (response.data.length > 50000) { - console.warn('Payload dati troppo grande ricevuto dal server'); - return false; - } - } - - return true; - } - - /** - * Tentativo di riconnessione automatica - */ - private attemptReconnect(): void { - this.reconnectAttempts++; - const delay = this.reconnectDelay * this.reconnectAttempts; - - console.log(`🎤 Tentativo riconnessione ${this.reconnectAttempts}/${this.maxReconnectAttempts} in ${delay}ms`); - - setTimeout(() => { - this.connect(); - }, delay); - } - - /** - * Disconnette il WebSocket - */ - disconnect(): void { - // Pulisce timeout e risorse - this.clearAuthTimeout(); - this.clearMessageQueue(); - this.authState = WebSocketAuthState.DISCONNECTED; - - if (this.ws) { - this.ws.close(1000, 'Disconnessione volontaria'); - this.ws = null; - } - } - - /** - * Distrugge la connessione e pulisce tutte le 
risorse - */ - destroy(): void { - this.disconnect(); - this.callbacks = {}; - this.reconnectAttempts = 0; - } - - /** - * Forza una nuova autenticazione (utile se il token è stato aggiornato) - */ - async reAuthenticate(): Promise { - if (!this.isConnected()) { - console.warn('Non è possibile ri-autenticarsi: WebSocket non connesso'); - return false; - } - - const token = await getValidToken(); - if (!token) { - this.callbacks.onError?.('Token di autenticazione non disponibile per ri-autenticazione'); - return false; - } - - console.log('🔄 Avvio ri-autenticazione'); - this.clearMessageQueue(); // Pulisce eventuali messaggi precedenti - this.startAuthentication(token); - return true; - } -} diff --git a/src/services/voiceBotService.ts b/src/services/voiceBotService.ts new file mode 100644 index 0000000..63a2f60 --- /dev/null +++ b/src/services/voiceBotService.ts @@ -0,0 +1,512 @@ +import { getValidToken } from "./authService"; + +// ============= VOICE CHAT WEBSOCKET (OpenAI Realtime API) ============= + +/** + * Tipi per i messaggi client -> server (JSON text frames) + */ +export interface VoiceAuthMessage { + type: 'auth'; + token: string; +} + +export interface VoiceTextMessage { + type: 'text'; + content: string; +} + +export type VoiceClientMessage = + | VoiceAuthMessage + | VoiceTextMessage; + +/** + * NOTA: L'audio viene inviato come WebSocket binary frame (raw PCM16 bytes), + * NON come messaggio JSON. Vedere sendAudio() per i dettagli. 
+ */ + +/** + * Tipi per i messaggi server -> client + */ +export type VoiceServerPhase = + | 'authenticated' + | 'ready' + | 'speech_started' // Utente ha iniziato a parlare (VAD di OpenAI) + | 'speech_stopped' // Utente ha finito di parlare (VAD di OpenAI) + | 'interrupted' + | 'audio_end' + | 'agent_start' + | 'agent_end'; + +export interface VoiceStatusResponse { + type: 'status'; + phase: VoiceServerPhase; + message?: string; +} + +export interface VoiceAudioResponse { + type: 'audio'; + data: string; // base64 PCM16 + chunk_index: number; +} + +export interface VoiceTranscriptResponse { + type: 'transcript'; + role: 'user' | 'assistant'; + content: string; +} + +export interface VoiceToolCallResponse { + type: 'tool_call'; + tool_name: string; + tool_args: string; +} + +export interface VoiceToolOutputResponse { + type: 'tool_output'; + tool_name: string; + output: string; +} + +export interface VoiceErrorResponse { + type: 'error'; + message: string; +} + +export interface VoiceDoneResponse { + type: 'done'; +} + +export type VoiceServerMessage = + | VoiceStatusResponse + | VoiceAudioResponse + | VoiceTranscriptResponse + | VoiceToolCallResponse + | VoiceToolOutputResponse + | VoiceErrorResponse + | VoiceDoneResponse; + +/** + * Stati di autenticazione/connessione WebSocket + */ +export enum WebSocketAuthState { + DISCONNECTED = 'disconnected', + CONNECTING = 'connecting', + AUTHENTICATING = 'authenticating', + AUTHENTICATED = 'authenticated', + READY = 'ready', + FAILED = 'failed' +} + +/** + * Callback per gestire i diversi tipi di risposta dal WebSocket vocale + */ +export interface VoiceChatCallbacks { + onStatus?: (phase: VoiceServerPhase, message: string) => void; + onAudioChunk?: (audioData: string, chunkIndex: number) => void; + onTranscript?: (role: 'user' | 'assistant', content: string) => void; + onToolCall?: (toolName: string, args: string) => void; + onToolOutput?: (toolName: string, output: string) => void; + onError?: (error: string) => void; 
+ onConnectionOpen?: () => void; + onConnectionClose?: () => void; + onAuthenticationSuccess?: (message: string) => void; + onAuthenticationFailed?: (error: string) => void; + onReady?: () => void; + onDone?: () => void; +} + +/** + * Specifiche audio per il WebSocket vocale: + * - Formato: PCM16 (signed 16-bit little-endian) + * - Sample rate: 24000 Hz + * - Canali: 1 (mono) + * - Byte per sample: 2 + * - Dimensione chunk consigliata: 4800 bytes (100ms di audio @ 24kHz) + * - Intervallo invio: ogni 100ms + */ +export const VOICE_AUDIO_SAMPLE_RATE = 24000; +export const VOICE_AUDIO_CHANNELS = 1; +export const VOICE_AUDIO_BYTES_PER_SAMPLE = 2; +export const VOICE_RECOMMENDED_CHUNK_SIZE_BYTES = 4800; // 100ms @ 24kHz mono PCM16 +export const VOICE_CHUNK_INTERVAL_MS = 100; // Intervallo di invio consigliato +const MAX_AUDIO_CHUNK_BYTES = 2_500_000; // Safety limit per validazione + +/** + * Classe per gestire la connessione WebSocket per la chat vocale + * Compatibile con l'OpenAI Realtime API tramite il backend + * + * IMPORTANTE: + * - L'audio viene inviato come binary frame (raw PCM16 bytes) continuamente + * - Il VAD (Voice Activity Detection) è gestito automaticamente da OpenAI + * - Non serve inviare messaggi di commit o interrupt (gestiti automaticamente) + * - Il microfono resta sempre attivo, anche durante le risposte dell'assistente + */ +export class VoiceBotWebSocket { + private ws: WebSocket | null = null; + private callbacks: VoiceChatCallbacks; + private baseUrl: string = 'wss://taskly-production.up.railway.app'; + private reconnectAttempts: number = 0; + private maxReconnectAttempts: number = 3; + private reconnectDelay: number = 1000; + private authState: WebSocketAuthState = WebSocketAuthState.DISCONNECTED; + private messageQueue: VoiceClientMessage[] = []; + private authTimeout: NodeJS.Timeout | null = null; + private readonly AUTH_TIMEOUT_MS = 15000; // 15s timeout (setup MCP + RealtimeAgent) + + constructor(callbacks: VoiceChatCallbacks) { + 
this.callbacks = callbacks; + } + + /** + * Connette al WebSocket per la chat vocale + */ + async connect(): Promise { + try { + const token = await getValidToken(); + if (!token) { + this.authState = WebSocketAuthState.FAILED; + this.callbacks.onError?.('Token di autenticazione non disponibile'); + return false; + } + + this.authState = WebSocketAuthState.CONNECTING; + const wsUrl = `${this.baseUrl}/chat/voice-bot-websocket`; + + return new Promise((resolve, reject) => { + this.ws = new WebSocket(wsUrl); + + const connectionTimeout = setTimeout(() => { + this.authState = WebSocketAuthState.FAILED; + this.callbacks.onError?.('Timeout connessione WebSocket'); + reject(new Error('Timeout connessione WebSocket')); + }, 10000); + + this.ws.onopen = () => { + clearTimeout(connectionTimeout); + this.reconnectAttempts = 0; + this.startAuthentication(token); + this.callbacks.onConnectionOpen?.(); + resolve(true); + }; + + this.ws.onmessage = (event) => { + try { + const response = JSON.parse(event.data); + this.handleResponse(response); + } catch (error) { + console.error('Errore parsing risposta WebSocket:', error); + this.callbacks.onError?.('Errore nel formato della risposta del server'); + } + }; + + this.ws.onerror = (error) => { + clearTimeout(connectionTimeout); + console.error('Errore WebSocket vocale:', error); + this.authState = WebSocketAuthState.FAILED; + this.clearAuthTimeout(); + this.callbacks.onError?.('Errore di connessione WebSocket'); + reject(new Error('Errore di connessione WebSocket')); + }; + + this.ws.onclose = (event) => { + clearTimeout(connectionTimeout); + this.authState = WebSocketAuthState.DISCONNECTED; + this.clearAuthTimeout(); + this.messageQueue = []; + this.callbacks.onConnectionClose?.(); + + if (this.reconnectAttempts < this.maxReconnectAttempts && event.code !== 1000) { + this.attemptReconnect(); + } + }; + }); + } catch (error) { + console.error('Errore connessione WebSocket vocale:', error); + this.authState = 
WebSocketAuthState.FAILED; + this.callbacks.onError?.('Impossibile connettersi al servizio vocale'); + return false; + } + } + + /** + * Avvia il processo di autenticazione + */ + private startAuthentication(token: string): void { + if (!this.isConnected()) return; + + this.authState = WebSocketAuthState.AUTHENTICATING; + + this.authTimeout = setTimeout(() => { + this.authState = WebSocketAuthState.FAILED; + this.callbacks.onAuthenticationFailed?.('Timeout autenticazione - il server non ha risposto'); + this.disconnect(); + }, this.AUTH_TIMEOUT_MS); + + const authMessage: VoiceAuthMessage = { + type: 'auth', + token: token.startsWith('Bearer ') ? token : `Bearer ${token}` + }; + + this.ws!.send(JSON.stringify(authMessage)); + } + + private clearAuthTimeout(): void { + if (this.authTimeout) { + clearTimeout(this.authTimeout); + this.authTimeout = null; + } + } + + /** + * Gestisce le risposte ricevute dal WebSocket + */ + private handleResponse(response: VoiceServerMessage): void { + if (!this.validateResponse(response)) { + console.warn('Messaggio WebSocket non valido ricevuto:', response); + return; + } + + switch (response.type) { + case 'status': + this.handleStatusResponse(response as VoiceStatusResponse); + break; + + case 'audio': + this.handleAudioResponse(response as VoiceAudioResponse); + break; + + case 'transcript': + this.handleTranscriptResponse(response as VoiceTranscriptResponse); + break; + + case 'tool_call': + this.callbacks.onToolCall?.( + (response as VoiceToolCallResponse).tool_name, + (response as VoiceToolCallResponse).tool_args + ); + break; + + case 'tool_output': + this.callbacks.onToolOutput?.( + (response as VoiceToolOutputResponse).tool_name, + (response as VoiceToolOutputResponse).output + ); + break; + + case 'error': + this.handleErrorResponse(response as VoiceErrorResponse); + break; + + case 'done': + this.callbacks.onDone?.(); + break; + } + } + + /** + * Gestisce risposta di stato + */ + private handleStatusResponse(response: 
VoiceStatusResponse): void { + const phase = response.phase; + const message = response.message || ''; + + console.log(`[VoiceBotWebSocket] handleStatusResponse: phase=${phase}, message=${message}`); + + switch (phase) { + case 'authenticated': + console.log('Autenticazione WebSocket riuscita:', message); + this.authState = WebSocketAuthState.AUTHENTICATED; + this.callbacks.onAuthenticationSuccess?.(message); + this.callbacks.onStatus?.(phase, message); + // Non processare la coda qui - aspettare 'ready' + break; + + case 'ready': + console.log('Sessione vocale pronta'); + this.authState = WebSocketAuthState.READY; + this.clearAuthTimeout(); + this.processQueuedMessages(); + this.callbacks.onReady?.(); + this.callbacks.onStatus?.(phase, message); + break; + + case 'speech_started': + case 'speech_stopped': + case 'interrupted': + case 'audio_end': + case 'agent_start': + case 'agent_end': + this.callbacks.onStatus?.(phase, message); + break; + + default: + console.warn('Fase WebSocket sconosciuta:', phase); + } + } + + /** + * Gestisce risposta audio PCM16 + */ + private handleAudioResponse(response: VoiceAudioResponse): void { + if (this.authState === WebSocketAuthState.DISCONNECTED) return; + + if (response.data) { + this.callbacks.onAudioChunk?.(response.data, response.chunk_index); + } + } + + /** + * Gestisce risposta di trascrizione + */ + private handleTranscriptResponse(response: VoiceTranscriptResponse): void { + this.callbacks.onTranscript?.(response.role, response.content); + } + + /** + * Gestisce risposta di errore + */ + private handleErrorResponse(response: VoiceErrorResponse): void { + if (!response.message) return; + + if (this.authState === WebSocketAuthState.AUTHENTICATING) { + this.authState = WebSocketAuthState.FAILED; + this.clearAuthTimeout(); + this.callbacks.onAuthenticationFailed?.(response.message); + } else { + this.callbacks.onError?.(response.message); + } + } + + /** + * Invia un chunk audio PCM16 raw al server come binary frame + * + 
* @param pcm16Data - Raw PCM16 bytes (ArrayBuffer o Uint8Array) + * Formato: 24kHz, mono, 16-bit little-endian + * Dimensione consigliata: 4800 bytes (100ms) + * + * IMPORTANTE: Il microfono deve inviare audio continuamente dal momento + * in cui si riceve "ready" fino alla chiusura del WebSocket. + * OpenAI gestisce automaticamente VAD e interruzioni. + */ + sendAudio(pcm16Data: ArrayBuffer | Uint8Array): void { + // Non logghiamo errori se la connessione è già chiusa per evitare spam di log + // In questo caso semplicemente scartiamo l'audio silenziosamente + if (!this.isConnected() || !this.isReady()) { + return; + } + + const bytes = pcm16Data instanceof Uint8Array ? pcm16Data.buffer : pcm16Data; + + // Invia come binary frame (NON JSON) + this.ws!.send(bytes); + } + + /** + * Invia un messaggio di testo all'assistente + */ + sendText(content: string): void { + const msg: VoiceTextMessage = { type: 'text', content }; + this.sendOrQueue(msg); + } + + /** + * Invia un messaggio o lo mette in coda se non ancora pronto + */ + private sendOrQueue(message: VoiceClientMessage): void { + if (!this.isConnected()) { + // Non logghiamo errori se la connessione è già chiusa per evitare spam di log + return; + } + + if (!this.isReady()) { + this.messageQueue.push(message); + return; + } + + const json = JSON.stringify(message); + this.ws!.send(json); + } + + /** + * Processa i messaggi in coda dopo che la sessione e' pronta + */ + private processQueuedMessages(): void { + if (this.messageQueue.length === 0) return; + + while (this.messageQueue.length > 0) { + const message = this.messageQueue.shift(); + if (message && this.isConnected()) { + this.ws!.send(JSON.stringify(message)); + } + } + } + + /** + * Valida una risposta WebSocket + */ + private validateResponse(response: any): response is VoiceServerMessage { + if (!response || typeof response !== 'object') return false; + + const validTypes = ['status', 'audio', 'transcript', 'tool_call', 'tool_output', 'error', 
'done']; + if (!validTypes.includes(response.type)) return false; + + // Verifica lunghezza messaggi + if (response.message && typeof response.message === 'string' && response.message.length > 5000) { + console.warn('Messaggio troppo lungo ricevuto dal server'); + return false; + } + + // Verifica chunk audio + if (response.type === 'audio' && response.data) { + const approxBytes = Math.floor(response.data.length * 0.75); + if (approxBytes > MAX_AUDIO_CHUNK_BYTES) { + console.warn(`Chunk audio molto grande (~${approxBytes} bytes)`); + } + } + + return true; + } + + isConnected(): boolean { + return this.ws !== null && this.ws.readyState === WebSocket.OPEN; + } + + isAuthenticated(): boolean { + return this.authState === WebSocketAuthState.AUTHENTICATED || + this.authState === WebSocketAuthState.READY; + } + + isReady(): boolean { + return this.authState === WebSocketAuthState.READY; + } + + getAuthState(): WebSocketAuthState { + return this.authState; + } + + private attemptReconnect(): void { + this.reconnectAttempts++; + const delay = this.reconnectDelay * this.reconnectAttempts; + + setTimeout(() => { + this.connect(); + }, delay); + } + + disconnect(): void { + this.clearAuthTimeout(); + this.messageQueue = []; + this.authState = WebSocketAuthState.DISCONNECTED; + + if (this.ws) { + this.ws.close(1000, 'Disconnessione volontaria'); + this.ws = null; + } + } + + destroy(): void { + this.disconnect(); + this.callbacks = {}; + this.reconnectAttempts = 0; + } +} diff --git a/src/utils/audioUtils.ts b/src/utils/audioUtils.ts index 4f07997..eab6fe8 100644 --- a/src/utils/audioUtils.ts +++ b/src/utils/audioUtils.ts @@ -1,9 +1,12 @@ import { Audio } from 'expo-av'; import * as FileSystem from 'expo-file-system'; +import { VoiceProcessor } from '@picovoice/react-native-voice-processor'; /** * Utility per la gestione dell'audio nella chat vocale - * Include registrazione, conversione base64, e riproduzione + * Registrazione: @picovoice/react-native-voice-processor 
(streaming frames PCM16) + * Riproduzione: expo-av (playback WAV) + * Server richiede PCM16 a 24kHz, VoiceProcessor registra a 16kHz -> resample necessario */ /** @@ -12,13 +15,10 @@ import * as FileSystem from 'expo-file-system'; */ function decodeBase64(base64: string): Uint8Array { const chars = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/'; - // Rimuove eventuali caratteri non-base64 (es. newline) const sanitized = base64.replace(/[^A-Za-z0-9+/=]/g, ''); - // Padding per lunghezza non multipla di 4 (comportamento analogo a Buffer.from) const padLength = (4 - (sanitized.length % 4)) % 4; const padded = sanitized.padEnd(sanitized.length + padLength, '='); - // Calcola la lunghezza del buffer tenendo conto del padding let bufferLength = padded.length * 0.75; if (padded.endsWith('==')) bufferLength -= 2; else if (padded.endsWith('=')) bufferLength -= 1; @@ -56,7 +56,6 @@ function encodeBase64(bytes: Uint8Array): string { while (i < bytes.length) { const a = bytes[i++]; - // Traccia quanti byte veri abbiamo per questo gruppo const hasB = i < bytes.length; const b = hasB ? 
bytes[i++] : 0; const hasC = i < bytes.length; @@ -73,168 +72,162 @@ function encodeBase64(bytes: Uint8Array): string { return result; } -// Configurazioni audio +// Configurazioni audio per OpenAI Realtime API export const AUDIO_CONFIG = { - SAMPLE_RATE: 16000, + SAMPLE_RATE: 24000, // Sample rate registrazione e server (VoiceProcessor supporta 24kHz diretto) CHANNELS: 1, BIT_DEPTH: 16, - CHUNK_DURATION: 1000, // ms - MAX_RECORDING_TIME: 60000, // 60 secondi - AUDIO_FORMAT: Audio.RecordingOptionsPresets.HIGH_QUALITY.android.extension || 'm4a' + FRAME_LENGTH: 1024, // Numero di campioni per frame (VoiceProcessor) + MAX_RECORDING_TIME: 300000, // 5 minuti per sessioni conversazionali }; -// Configurazioni VAD (Voice Activity Detection) -export const VAD_CONFIG = { - SPEECH_THRESHOLD_DB: -50, // dB sopra questa soglia = voce rilevata (più sensibile) - SILENCE_THRESHOLD_DB: -60, // dB sotto questa soglia = silenzio - SILENCE_DURATION_MS: 1200, // Durata silenzio prima di fermare (1.2s) - METERING_POLL_INTERVAL_MS: 100, // Intervallo controllo livello audio (100ms) - MIN_RECORDING_DURATION_MS: 300, // Durata minima registrazione prima di VAD (300ms) -}; - -// Configurazione per normalizzazione audio level -export const AUDIO_LEVEL_CONFIG = { - MIN_DB: -80, // Livello di silenzio tipico - MAX_DB: -10, // Livello di voce forte -}; - -// Configurazione per voice chat chunk flow control -export const VOICE_CHUNK_CONFIG = { - // Minimum chunks to buffer before starting playback - MIN_CHUNKS_BEFORE_PLAYBACK: 3, - - // Maximum wait time for chunks (ms) before starting with available - MAX_BUFFER_WAIT_MS: 2000, - // Burst detection threshold (inter-arrival time < this = burst) - BURST_DETECTION_THRESHOLD_MS: 10, +/** + * Converte un array di campioni Int16 (number[]) in Uint8Array little-endian PCM16. + * VoiceProcessor fornisce frames come number[], il server richiede PCM16 bytes. 
+ */ +function int16ArrayToBytes(samples: number[]): Uint8Array { + const buffer = new Uint8Array(samples.length * 2); + const view = new DataView(buffer.buffer); + for (let i = 0; i < samples.length; i++) { + view.setInt16(i * 2, samples[i], true); // little-endian + } + return buffer; +} - // Warning threshold for low buffer during playback - LOW_BUFFER_WARNING_THRESHOLD: 1, -}; +/** + * Crea un header WAV per dati audio PCM16. + * Necessario perche' expo-av richiede un formato file per la riproduzione. + */ +export function createWavHeader( + pcm16DataLength: number, + sampleRate: number = 24000, + channels: number = 1, + bitsPerSample: number = 16 +): Uint8Array { + const header = new Uint8Array(44); + const view = new DataView(header.buffer); + + const byteRate = sampleRate * channels * (bitsPerSample / 8); + const blockAlign = channels * (bitsPerSample / 8); + const fileSize = 36 + pcm16DataLength; + + // "RIFF" chunk descriptor + header.set([0x52, 0x49, 0x46, 0x46], 0); // "RIFF" + view.setUint32(4, fileSize, true); + header.set([0x57, 0x41, 0x56, 0x45], 8); // "WAVE" + + // "fmt " sub-chunk + header.set([0x66, 0x6d, 0x74, 0x20], 12); // "fmt " + view.setUint32(16, 16, true); // sub-chunk size (16 for PCM) + view.setUint16(20, 1, true); // audio format (1 = PCM) + view.setUint16(22, channels, true); + view.setUint32(24, sampleRate, true); + view.setUint32(28, byteRate, true); + view.setUint16(32, blockAlign, true); + view.setUint16(34, bitsPerSample, true); + + // "data" sub-chunk + header.set([0x64, 0x61, 0x74, 0x61], 36); // "data" + view.setUint32(40, pcm16DataLength, true); + + return header; +} /** - * Callback per eventi VAD + * Wrappa dati PCM16 raw con un header WAV. + * Restituisce un file WAV completo come Uint8Array. 
*/ -export interface VADCallbacks { - onSpeechStart?: () => void; - onSpeechEnd?: () => void; - onSilenceDetected?: () => void; - onAutoStop?: () => void; - onMeteringUpdate?: (level: number) => void; +export function wrapPcm16InWav(pcm16Data: Uint8Array, sampleRate: number = 24000): Uint8Array { + const header = createWavHeader(pcm16Data.length, sampleRate); + const wav = new Uint8Array(header.length + pcm16Data.length); + wav.set(header, 0); + wav.set(pcm16Data, header.length); + return wav; } /** - * Classe per gestire la registrazione audio + * Classe per gestire la registrazione audio con @picovoice/react-native-voice-processor. + * Fornisce streaming di chunks PCM16 base64 a 24kHz direttamente. */ export class AudioRecorder { - private recording: Audio.Recording | null = null; private isRecording: boolean = false; private recordingStartTime: number = 0; - - // VAD properties - private meteringInterval: NodeJS.Timeout | null = null; - private silenceStartTime: number | null = null; - private vadEnabled: boolean = false; - private vadCallbacks: VADCallbacks = {}; - private isSpeechDetected: boolean = false; + private onChunkCallback: ((base64Chunk: string) => void) | null = null; + private frameListener: ((frame: number[]) => void) | null = null; + private errorListener: ((error: any) => void) | null = null; + private voiceProcessor: VoiceProcessor = VoiceProcessor.instance; /** - * Inizializza e avvia la registrazione audio + * Avvia la registrazione audio con streaming frames. + * VoiceProcessor registra direttamente a 24kHz, ogni frame viene convertito in PCM16 base64. 
*/ - async startRecording(enableVAD: boolean = false, vadCallbacks?: VADCallbacks): Promise { - console.log('🎙️ RECORDER: startRecording chiamato, VAD:', enableVAD); - + async startRecording( + onChunk?: (base64Chunk: string) => void + ): Promise { try { - // Richiedi i permessi per il microfono - console.log('🔐 RECORDER: Richiesta permessi microfono...'); - const { granted } = await Audio.requestPermissionsAsync(); - console.log('🔐 RECORDER: Permessi microfono:', granted ? '✅ Concessi' : '❌ Negati'); + this.onChunkCallback = onChunk || null; - if (!granted) { - console.error('❌ RECORDER: Permessi microfono non concessi'); + // Verifica permessi microfono + if (!(await this.voiceProcessor.hasRecordAudioPermission())) { + console.error('Permesso microfono non concesso'); return false; } - // Configura la modalità audio per la registrazione - console.log('⚙️ RECORDER: Configurazione modalità audio...'); - await Audio.setAudioModeAsync({ - allowsRecordingIOS: true, - playsInSilentModeIOS: true, - staysActiveInBackground: true, - shouldDuckAndroid: true, - playThroughEarpieceAndroid: false, - }); - console.log('✅ RECORDER: Modalità audio configurata'); + // Listener per i frames audio: number[] di campioni Int16 @ 24kHz + this.frameListener = (frame: number[]) => { + if (!this.isRecording) return; - // Crea una nuova registrazione - console.log('📼 RECORDER: Creazione istanza Recording...'); - this.recording = new Audio.Recording(); + try { + // Converti campioni Int16 in bytes PCM16 little-endian + const pcm16Bytes = int16ArrayToBytes(frame); + + // Encode in base64 e invia + const base64Chunk = encodeBase64(pcm16Bytes); + this.onChunkCallback?.(base64Chunk); + } catch (error) { + console.error('Errore processamento frame audio:', error); + } + }; - // Configura le opzioni di registrazione con metering se VAD è abilitato - const recordingOptions = { - ...Audio.RecordingOptionsPresets.HIGH_QUALITY, - isMeteringEnabled: enableVAD, + // Listener per errori + 
this.errorListener = (error: any) => { + console.error('VoiceProcessor errore:', error); }; - console.log('⚙️ RECORDER: Opzioni registrazione:', recordingOptions); - // Prepara e avvia la registrazione - console.log('🎬 RECORDER: Preparazione registrazione...'); - await this.recording.prepareToRecordAsync(recordingOptions); - console.log('✅ RECORDER: Registrazione preparata'); + this.voiceProcessor.addFrameListener(this.frameListener); + this.voiceProcessor.addErrorListener(this.errorListener); - console.log('▶️ RECORDER: Avvio registrazione...'); - await this.recording.startAsync(); - console.log('✅ RECORDER: Registrazione avviata!'); + // Avvia la registrazione a 24kHz + await this.voiceProcessor.start(AUDIO_CONFIG.FRAME_LENGTH, AUDIO_CONFIG.SAMPLE_RATE); this.isRecording = true; this.recordingStartTime = Date.now(); - this.vadEnabled = enableVAD; - this.vadCallbacks = vadCallbacks || {}; - // Avvia il monitoraggio VAD se abilitato - if (enableVAD) { - console.log('🎚️ RECORDER: Avvio monitoraggio VAD...'); - this.startVADMonitoring(); - } - - console.log('🎤 Registrazione audio iniziata', enableVAD ? '(VAD attivo)' : '(VAD disattivo)'); + console.log('Registrazione VoiceProcessor avviata - streaming PCM16 a 24kHz'); return true; } catch (error) { - console.error('❌ RECORDER: Errore avvio registrazione:', error); - console.error('❌ RECORDER: Stack trace:', error); + console.error('Errore avvio registrazione:', error); this.cleanup(); return false; } } /** - * Ferma la registrazione e restituisce i dati audio + * Ferma la registrazione. I chunks sono gia' stati inviati in streaming. 
*/ async stopRecording(): Promise { - if (!this.recording || !this.isRecording) { + if (!this.isRecording) { console.warn('Nessuna registrazione attiva'); return null; } try { - await this.recording.stopAndUnloadAsync(); - const uri = this.recording.getURI(); - - if (!uri) { - console.error('URI della registrazione non disponibile'); - return null; - } - - // Converti il file audio in base64 - const base64Data = await this.convertAudioToBase64(uri); - + await this.voiceProcessor.stop(); this.isRecording = false; - console.log('🎤 Registrazione completata'); - - return base64Data; - + console.log('Registrazione completata'); + return null; } catch (error) { console.error('Errore stop registrazione:', error); return null; @@ -243,83 +236,19 @@ export class AudioRecorder { } } - /** - * Converti un file audio in formato base64 - */ - private async convertAudioToBase64(audioUri: string): Promise { - try { - console.log('🎤 Inizio conversione base64 per URI:', audioUri); - - // Verifica se FileSystem è disponibile - if (!FileSystem || !FileSystem.readAsStringAsync) { - console.error('FileSystem non disponibile o metodo readAsStringAsync mancante'); - return null; - } - - // Verifica se l'URI esiste - const fileInfo = await FileSystem.getInfoAsync(audioUri); - if (!fileInfo.exists) { - console.error('File audio non esiste:', audioUri); - return null; - } - - console.log('🎤 File info:', fileInfo); - - // Leggiamo i primi bytes per verificare l'header del file - const headerBytes = await this.readFileHeader(audioUri); - console.log('🎤 Header bytes (primi 8):', headerBytes); - - // In React Native/Expo, usiamo FileSystem per leggere il file - const base64Data = await FileSystem.readAsStringAsync(audioUri, { - encoding: FileSystem.EncodingType.Base64, - }); - - console.log('🎤 Conversione base64 completata, lunghezza:', base64Data?.length || 0); - - // Debug: verifica i primi caratteri del base64 per identificare il formato - if (base64Data && base64Data.length > 20) { - const 
first20chars = base64Data.substring(0, 20); - console.log('🎤 Prime 20 chars base64:', first20chars); - - // Decodifica i primi bytes per vedere l'header - const headerBase64 = base64Data.substring(0, 32); // primi ~24 bytes - const headerBuffer = this.base64ToBytes(headerBase64); - console.log('🎤 Header decodificato:', Array.from(headerBuffer).map(b => b.toString(16).padStart(2, '0')).join(' ')); - } - - return base64Data; - - } catch (error) { - console.error('Errore conversione base64:', error); - console.error('URI problematico:', audioUri); - console.error('FileSystem disponibile:', !!FileSystem); - console.error('readAsStringAsync disponibile:', !!(FileSystem?.readAsStringAsync)); - return null; - } - } - - /** - * Ottieni la durata della registrazione corrente - */ getRecordingDuration(): number { if (!this.isRecording) return 0; return Date.now() - this.recordingStartTime; } - /** - * Controlla se la registrazione è attiva - */ isCurrentlyRecording(): boolean { return this.isRecording; } - /** - * Cancella la registrazione corrente - */ async cancelRecording(): Promise { - if (this.recording && this.isRecording) { + if (this.isRecording) { try { - await this.recording.stopAndUnloadAsync(); + await this.voiceProcessor.stop(); } catch (error) { console.error('Errore cancellazione registrazione:', error); } @@ -327,368 +256,45 @@ export class AudioRecorder { this.cleanup(); } - /** - * Legge i primi bytes del file per verificare l'header - */ - private async readFileHeader(audioUri: string): Promise { - try { - // Leggiamo tutto il file in base64, poi prendiamo solo i primi bytes - const fullBase64 = await FileSystem.readAsStringAsync(audioUri, { - encoding: FileSystem.EncodingType.Base64, - }); - - // Prendiamo solo i primi ~16 caratteri base64 (corrispondenti a ~12 bytes) - const headerBase64 = fullBase64.substring(0, 16); - const headerBytes = this.base64ToBytes(headerBase64); - return Array.from(headerBytes).slice(0, 8); - } catch (error) { - 
console.error('Errore lettura header:', error); - return []; - } - } - - /** - * Converte base64 in array di bytes - */ - private base64ToBytes(base64: string): Uint8Array { - try { - return decodeBase64(base64); - } catch (error) { - console.error('Errore conversione base64 to bytes:', error); - return new Uint8Array(0); - } - } - - /** - * Avvia il monitoraggio VAD - */ - private startVADMonitoring(): void { - this.meteringInterval = setInterval(async () => { - if (!this.recording || !this.isRecording) { - this.stopVADMonitoring(); - return; - } - - try { - const status = await this.recording.getStatusAsync(); - - if (status.isRecording && status.metering !== undefined) { - const meteringDB = status.metering; - - // Notifica aggiornamento livello audio - this.vadCallbacks.onMeteringUpdate?.(meteringDB); - - // Processa il livello audio per VAD - this.processMeteringLevel(meteringDB); - } - } catch (error) { - console.error('Errore monitoraggio VAD:', error); - } - }, VAD_CONFIG.METERING_POLL_INTERVAL_MS); - - console.log('🎤 Monitoraggio VAD avviato'); - } - - /** - * Ferma il monitoraggio VAD - */ - private stopVADMonitoring(): void { - if (this.meteringInterval) { - clearInterval(this.meteringInterval); - this.meteringInterval = null; - console.log('🎤 Monitoraggio VAD fermato'); - } - } - - /** - * Processa il livello audio per rilevare voce e silenzio - */ - private processMeteringLevel(meteringDB: number): void { - const recordingDuration = this.getRecordingDuration(); - - // Log del livello audio ogni 500ms per debugging - if (recordingDuration % 500 < VAD_CONFIG.METERING_POLL_INTERVAL_MS) { - console.log(`📊 Audio Level: ${meteringDB.toFixed(1)} dB | Duration: ${(recordingDuration / 1000).toFixed(1)}s`); - } - - // Non attivare VAD se la registrazione è troppo corta - if (recordingDuration < VAD_CONFIG.MIN_RECORDING_DURATION_MS) { - console.log(`⏱️ VAD: Attesa iniziale... 
(${recordingDuration}ms/${VAD_CONFIG.MIN_RECORDING_DURATION_MS}ms)`); - return; - } - - // Rilevamento voce - if (meteringDB > VAD_CONFIG.SPEECH_THRESHOLD_DB) { - if (!this.isSpeechDetected) { - this.isSpeechDetected = true; - this.vadCallbacks.onSpeechStart?.(); - console.log(`🎤 VAD: ✅ VOCE RILEVATA! (${meteringDB.toFixed(1)} dB > ${VAD_CONFIG.SPEECH_THRESHOLD_DB} dB)`); - } - - // Reset timer silenzio quando si parla - if (this.silenceStartTime) { - console.log(`🔊 VAD: Voce continua, reset timer silenzio`); - this.silenceStartTime = null; - } - } - // Rilevamento silenzio - else if (meteringDB < VAD_CONFIG.SILENCE_THRESHOLD_DB) { - // Inizia timer silenzio - if (!this.silenceStartTime) { - this.silenceStartTime = Date.now(); - this.vadCallbacks.onSilenceDetected?.(); - console.log(`🔇 VAD: ⏸️ SILENZIO RILEVATO (${meteringDB.toFixed(1)} dB < ${VAD_CONFIG.SILENCE_THRESHOLD_DB} dB)`); - } else { - const silenceDuration = Date.now() - this.silenceStartTime; - - // Log progressivo del silenzio - if (silenceDuration % 300 < VAD_CONFIG.METERING_POLL_INTERVAL_MS) { - console.log(`⏱️ VAD: Silenzio da ${(silenceDuration / 1000).toFixed(1)}s (stop a ${(VAD_CONFIG.SILENCE_DURATION_MS / 1000).toFixed(1)}s)`); - } - - // Se il silenzio dura abbastanza, ferma la registrazione - if (silenceDuration >= VAD_CONFIG.SILENCE_DURATION_MS && this.isSpeechDetected) { - console.log(`🛑 VAD: ⏹️ AUTO-STOP ATTIVATO! 
(silenzio prolungato ${(silenceDuration / 1000).toFixed(1)}s)`); - this.vadCallbacks.onAutoStop?.(); - this.vadCallbacks.onSpeechEnd?.(); - } - } + private cleanup(): void { + if (this.frameListener) { + this.voiceProcessor.removeFrameListener(this.frameListener); + this.frameListener = null; } - // Zona intermedia (tra soglia silenzio e soglia voce) - else { - console.log(`📍 VAD: Zona intermedia (${meteringDB.toFixed(1)} dB tra ${VAD_CONFIG.SILENCE_THRESHOLD_DB} e ${VAD_CONFIG.SPEECH_THRESHOLD_DB})`); + if (this.errorListener) { + this.voiceProcessor.removeErrorListener(this.errorListener); + this.errorListener = null; } - } - /** - * Pulisce le risorse della registrazione - */ - private cleanup(): void { - this.stopVADMonitoring(); - this.recording = null; this.isRecording = false; this.recordingStartTime = 0; - this.silenceStartTime = null; - this.vadEnabled = false; - this.isSpeechDetected = false; + this.onChunkCallback = null; } } /** - * Classe per gestire la riproduzione audio con streaming + * Classe per gestire la riproduzione di chunk audio PCM16 + * Usa expo-av per il playback */ export class AudioPlayer { private currentSound: Audio.Sound | null = null; - private chunkBuffer: Array<{ index?: number; data: string }> = []; + private chunkBuffer: { index?: number; data: string }[] = []; private seenChunkIndexes: Set<number> = new Set(); private highestIndexedChunk: number = -1; private isPlaying: boolean = false; - private onCompleteCallback: (() => void) | null = null; - - // Buffer state tracking per diagnostica - private lastChunkReceivedTime: number = 0; - private chunkArrivalTimes: number[] = []; - private bufferStartTime: number = 0; - private isBufferingStarted: boolean = false; - - constructor() {} - - /** - * Converte base64 in array di bytes - */ - private base64ToBytes(base64: string): Uint8Array { - try { - return decodeBase64(base64); - } catch (error) { - console.error('Errore conversione base64 to bytes:', error); - return new Uint8Array(0); - } - 
} - - /** - * Converte bytes in base64 - */ - private bytesToBase64(bytes: Uint8Array): string { - try { - return encodeBase64(bytes); - } catch (error) { - console.error('Errore conversione bytes to base64:', error); - return ''; - } - } - - /** - * Tenta di individuare il formato audio dai primi bytes - */ - private detectAudioFormat(data: Uint8Array): 'wav' | 'mp3' | 'm4a' | 'ogg' | 'unknown' { - if (data.length < 12) return 'unknown'; - - // RIFF/WAVE - if ( - data[0] === 0x52 && data[1] === 0x49 && data[2] === 0x46 && data[3] === 0x46 && - data[8] === 0x57 && data[9] === 0x41 && data[10] === 0x56 && data[11] === 0x45 - ) { - return 'wav'; - } - - // MP3 (ID3 tag o frame sync 0xfff*) - if ( - (data[0] === 0x49 && data[1] === 0x44 && data[2] === 0x33) || // ID3 - (data[0] === 0xff && (data[1] & 0xe0) === 0xe0) // frame sync - ) { - return 'mp3'; - } - - // OGG - if (data[0] === 0x4f && data[1] === 0x67 && data[2] === 0x67 && data[3] === 0x53) { - return 'ogg'; - } - - // MP4/M4A (ftyp atom) - if ( - data[4] === 0x66 && data[5] === 0x74 && data[6] === 0x79 && data[7] === 0x70 && - data[8] === 0x4d && data[9] === 0x34 && data[10] === 0x41 - ) { - return 'm4a'; - } - - return 'unknown'; - } - - /** - * Controlla se il buffer è pronto per la riproduzione - * Ritorna true se abbiamo almeno MIN_CHUNKS_BEFORE_PLAYBACK chunk - */ - isReadyToPlay(): boolean { - const bufferedCount = this.getBufferedChunksCount(); - const isReady = bufferedCount >= VOICE_CHUNK_CONFIG.MIN_CHUNKS_BEFORE_PLAYBACK; - - if (!isReady && bufferedCount > 0) { - console.log(`🔊 Buffer non pronto: ${bufferedCount}/${VOICE_CHUNK_CONFIG.MIN_CHUNKS_BEFORE_PLAYBACK} chunk`); - } - - return isReady; - } - - /** - * Ottiene il numero di chunk attualmente nel buffer - */ - getBufferedChunksCount(): number { - return this.chunkBuffer.length; - } - - /** - * Ottiene statistiche sull'arrivo dei chunk - */ - getChunkArrivalStatistics(): { - totalReceived: number; - averageInterArrivalMs: number; - 
minInterArrivalMs: number; - maxInterArrivalMs: number; - bursts: number; - } | null { - if (this.chunkArrivalTimes.length < 2) return null; - - const interArrivals: number[] = []; - for (let i = 1; i < this.chunkArrivalTimes.length; i++) { - interArrivals.push(this.chunkArrivalTimes[i] - this.chunkArrivalTimes[i - 1]); - } - - const burstCount = interArrivals.filter( - time => time < VOICE_CHUNK_CONFIG.BURST_DETECTION_THRESHOLD_MS - ).length; - - const avgInterArrival = interArrivals.reduce((a, b) => a + b, 0) / interArrivals.length; - - return { - totalReceived: this.chunkArrivalTimes.length, - averageInterArrivalMs: avgInterArrival, - minInterArrivalMs: Math.min(...interArrivals), - maxInterArrivalMs: Math.max(...interArrivals), - bursts: burstCount, - }; - } /** - * Riproduce audio da dati base64 concatenati - */ - async playAudioFromBase64(base64Data: string, onComplete?: () => void): Promise { - try { - await Audio.setAudioModeAsync({ - allowsRecordingIOS: false, - playsInSilentModeIOS: true, - staysActiveInBackground: true, - shouldDuckAndroid: true, - playThroughEarpieceAndroid: false, - }); - - const tempUri = `${FileSystem.documentDirectory}temp_audio_${Date.now()}.m4a`; - - await FileSystem.writeAsStringAsync(tempUri, base64Data, { - encoding: FileSystem.EncodingType.Base64, - }); - - const { sound } = await Audio.Sound.createAsync({ uri: tempUri }); - this.currentSound = sound; - - this.currentSound.setOnPlaybackStatusUpdate((status) => { - if (status.isLoaded && status.didJustFinish) { - console.log('🔊 Riproduzione completata'); - this.onPlaybackComplete(onComplete); - } - }); - - await this.currentSound.playAsync(); - this.isPlaying = true; - - console.log('🔊 Riproduzione audio iniziata'); - return true; - - } catch (error) { - console.error('🔊 Errore riproduzione audio:', error); - return false; - } - } - - /** - * Aggiunge un chunk alla collezione + * Aggiunge un chunk PCM16 base64 al buffer */ addChunk(base64Data: string, chunkIndex?: number): 
boolean { - const currentTime = Date.now(); - - // Traccia timing arrivo chunk (per prima volta) - if (!this.isBufferingStarted) { - this.isBufferingStarted = true; - this.bufferStartTime = currentTime; - console.log(`🔊 ⏱️ INIZIO BUFFERING chunk audio`); - } - - // Traccia inter-arrival time - if (this.lastChunkReceivedTime > 0) { - const interArrivalMs = currentTime - this.lastChunkReceivedTime; - this.chunkArrivalTimes.push(currentTime); - - if (interArrivalMs < VOICE_CHUNK_CONFIG.BURST_DETECTION_THRESHOLD_MS) { - console.warn(`🔊 ⚡ BURST RILEVATO: ${interArrivalMs}ms tra chunk`); - } - } else { - this.chunkArrivalTimes.push(currentTime); - } - - this.lastChunkReceivedTime = currentTime; - if (typeof chunkIndex === 'number') { if (this.seenChunkIndexes.has(chunkIndex)) { - console.warn(`🔊 Chunk duplicato ricevuto (indice ${chunkIndex}) - ignorato`); + console.warn(`Chunk duplicato (indice ${chunkIndex}) - ignorato`); return false; } - if (this.highestIndexedChunk >= 0 && chunkIndex > this.highestIndexedChunk + 1) { - console.warn(`🔊 Mancano uno o più chunk prima dell'indice ${chunkIndex} (ultimo ricevuto ${this.highestIndexedChunk})`); - } - if (this.highestIndexedChunk >= 0 && chunkIndex < this.highestIndexedChunk) { - console.warn(`🔊 Chunk fuori ordine rilevato: indice ${chunkIndex} ricevuto dopo ${this.highestIndexedChunk}`); + console.warn(`Chunk fuori ordine: indice ${chunkIndex} dopo ${this.highestIndexedChunk}`); } this.seenChunkIndexes.add(chunkIndex); @@ -696,93 +302,65 @@ export class AudioPlayer { } this.chunkBuffer.push({ index: chunkIndex, data: base64Data }); - const bufferedCount = this.getChunksCount(); - console.log(`🔊 Chunk #${typeof chunkIndex === 'number' ? chunkIndex : '?'} aggiunto. 
Buffer: ${bufferedCount}/${VOICE_CHUNK_CONFIG.MIN_CHUNKS_BEFORE_PLAYBACK}`); - return true; } /** - * Unisce TUTTI i chunk in un singolo file, poi lo riproduce - * Salva il file concatenato su disco prima di riprodurre + * Concatena tutti i chunk PCM16, li wrappa in WAV e li riproduce */ - - async playChunksSequentially(onComplete?: () => void): Promise<boolean> { + async playPcm16Chunks(onComplete?: () => void): Promise<boolean> { const totalChunks = this.getChunksCount(); - if (totalChunks === 0) { console.log('AudioPlayer: Nessun chunk da riprodurre'); return false; } - console.log(`AudioPlayer: Unione di ${totalChunks} chunk in corso...`); - try { + // Ordina i chunk per indice const indexedChunks = this.chunkBuffer .filter(chunk => typeof chunk.index === 'number') .sort((a, b) => (a.index as number) - (b.index as number)); const nonIndexedChunks = this.chunkBuffer.filter(chunk => typeof chunk.index !== 'number'); + const allChunks = [...indexedChunks, ...nonIndexedChunks]; - const playbackQueue = [...indexedChunks, ...nonIndexedChunks]; - - if (playbackQueue.length === 0) { - console.warn('AudioPlayer: Nessun chunk valido da riprodurre'); + if (allChunks.length === 0) { this.clearChunks(); return false; } - console.log('AudioPlayer: Step 1, decodifica chunk base64 e concatenazione binari...'); - - // Decodifica OGNI chunk base64 completamente a binario + // Decodifica tutti i chunk in binario const binaryChunks: Uint8Array[] = []; - for (const chunk of playbackQueue) { - const binaryData = this.base64ToBytes(chunk.data); - if (binaryData.length > 0) { - binaryChunks.push(binaryData); - console.log(` Chunk decodificato: ${binaryData.length} bytes`); + for (const chunk of allChunks) { + const binary = decodeBase64(chunk.data); + if (binary.length > 0) { + binaryChunks.push(binary); } } if (binaryChunks.length === 0) { - console.warn('AudioPlayer: Nessun chunk valido da decodificare'); this.clearChunks(); return false; } - // Concatena i binari usando Uint8Array.set() - const 
totalBinaryLength = binaryChunks.reduce((acc, chunk) => acc + chunk.length, 0); - const totalBinaryData = new Uint8Array(totalBinaryLength); + // Concatena tutti i dati PCM16 + const totalLength = binaryChunks.reduce((acc, c) => acc + c.length, 0); + const pcm16Data = new Uint8Array(totalLength); let offset = 0; - - binaryChunks.forEach((chunk) => { - totalBinaryData.set(chunk, offset); + for (const chunk of binaryChunks) { + pcm16Data.set(chunk, offset); offset += chunk.length; - }); - - console.log(`AudioPlayer: Step 1 completato (${totalBinaryData.length} bytes binari da ${binaryChunks.length} chunk)`); - - const detectedFormat = this.detectAudioFormat(totalBinaryData); - const extension = detectedFormat === 'unknown' ? 'm4a' : detectedFormat; - if (detectedFormat === 'unknown') { - console.warn('AudioPlayer: Formato audio non rilevato, uso fallback .m4a'); - } else { - console.log(`AudioPlayer: Formato audio rilevato -> ${detectedFormat}`); } - console.log(`AudioPlayer: Dati audio decodificati (${totalBinaryData.length} bytes)`); - - console.log('AudioPlayer: Step 2, salvataggio file audio concatenato...'); - const finalAudioPath = `${FileSystem.documentDirectory}final_audio_${Date.now()}.${extension}`; + console.log(`AudioPlayer: ${totalChunks} chunk -> ${pcm16Data.length} bytes PCM16`); - // Riencodifica a base64 per scrivere il file (richiesto da FileSystem) - const finalBase64 = this.bytesToBase64(totalBinaryData); + // Wrappa in WAV per la riproduzione con expo-av + const wavData = wrapPcm16InWav(pcm16Data, AUDIO_CONFIG.SAMPLE_RATE); + const wavBase64 = encodeBase64(wavData); - await FileSystem.writeAsStringAsync(finalAudioPath, finalBase64, { + const tempPath = `${FileSystem.documentDirectory}voice_response_${Date.now()}.wav`; + await FileSystem.writeAsStringAsync(tempPath, wavBase64, { encoding: FileSystem.EncodingType.Base64, }); - console.log(`AudioPlayer: Step 2 completato (file: ${finalAudioPath.split('/').pop()})`); - - console.log('AudioPlayer: 
Step 3, avvio riproduzione file concatenato...'); await Audio.setAudioModeAsync({ allowsRecordingIOS: false, @@ -792,199 +370,25 @@ export class AudioPlayer { playThroughEarpieceAndroid: false, }); - const { sound } = await Audio.Sound.createAsync({ uri: finalAudioPath }); + const { sound } = await Audio.Sound.createAsync({ uri: tempPath }); this.currentSound = sound; this.currentSound.setOnPlaybackStatusUpdate(async (status) => { if (status.isLoaded && status.didJustFinish) { console.log('AudioPlayer: Riproduzione completata'); - try { - await this.currentSound?.unloadAsync(); - } catch (e) { - console.warn('AudioPlayer: Errore unload audio'); - } - - try { - await FileSystem.deleteAsync(finalAudioPath); - console.log('AudioPlayer: File temporaneo eliminato'); - } catch (e) { - console.warn('AudioPlayer: Errore eliminazione file temporaneo'); - } - - this.clearChunks(); - onComplete?.(); + await this.onPlaybackComplete(onComplete, tempPath); } }); await this.currentSound.playAsync(); this.isPlaying = true; - console.log('AudioPlayer: Step 3 completato, riproduzione avviata'); - - return true; - } catch (error) { - console.error('AudioPlayer: Errore durante la riproduzione concatenata:', error); this.clearChunks(); - return false; - } - } - - /** - * Concatena tutti i chunk e li riproduce - * Salva i chunk in un singolo file e lo riproduce - */ - - async playAllChunks(onComplete?: () => void): Promise { - const totalChunks = this.getChunksCount(); - - if (totalChunks === 0) { - console.log('AudioPlayer: Nessun chunk da riprodurre'); - return false; - } - - const stats = this.getChunkArrivalStatistics(); - console.log('AudioPlayer: Stato buffer per playback'); - console.log(` - Total chunks: ${totalChunks}`); - if (stats) { - console.log(` - Avg inter-arrival: ${stats.averageInterArrivalMs.toFixed(2)}ms`); - console.log(` - Min/Max: ${stats.minInterArrivalMs}ms / ${stats.maxInterArrivalMs}ms`); - console.log(` - Burst events: ${stats.bursts}`); - } - if 
(this.isBufferingStarted) { - const bufferDuration = this.lastChunkReceivedTime - this.bufferStartTime; - console.log(` - Buffer duration: ${bufferDuration}ms`); - } - - console.log(`AudioPlayer: Inizio concatenazione di ${totalChunks} chunks...`); - - try { - const indexedChunks = this.chunkBuffer - .filter(chunk => typeof chunk.index === 'number') - .sort((a, b) => (a.index as number) - (b.index as number)); - const nonIndexedChunks = this.chunkBuffer.filter(chunk => typeof chunk.index !== 'number'); - - if (indexedChunks.length > 1) { - const sortedIndexes = indexedChunks.map(chunk => chunk.index as number); - for (let i = 1; i < sortedIndexes.length; i++) { - const expected = sortedIndexes[i - 1] + 1; - if (sortedIndexes[i] !== expected) { - if (sortedIndexes[i] < expected) { - console.warn(`AudioPlayer: Ordine chunk non crescente: indice ${sortedIndexes[i]} dopo ${sortedIndexes[i - 1]}`); - } else { - console.warn(`AudioPlayer: Mancano ${sortedIndexes[i] - expected} chunk audio prima dell'indice ${sortedIndexes[i]}`); - } - } - } - } - - const playbackQueue = [...indexedChunks, ...nonIndexedChunks]; - - if (playbackQueue.length === 0) { - console.warn('AudioPlayer: Nessun chunk valido da riprodurre dopo il filtraggio'); - this.clearChunks(); - return false; - } - - console.log('AudioPlayer: Decodifica chunk base64 e concatenazione binari...'); - - // Decodifica OGNI chunk base64 completamente a binario - const binaryChunks: Uint8Array[] = []; - let processedChunkCount = 0; - - for (const chunk of playbackQueue) { - try { - const binaryData = this.base64ToBytes(chunk.data); - - if (binaryData.length === 0) { - console.warn(`AudioPlayer: Chunk ${chunk.index ?? processedChunkCount} vuoto, ignorato`); - continue; - } - - binaryChunks.push(binaryData); - processedChunkCount++; - console.log(`AudioPlayer: Chunk ${chunk.index ?? 
processedChunkCount} decodificato (${binaryData.length} bytes)`); - } catch (chunkError) { - console.warn(`AudioPlayer: Errore decodifica chunk ${chunk.index ?? processedChunkCount}:`, chunkError); - } - } - - if (binaryChunks.length === 0) { - console.warn('AudioPlayer: Nessun chunk audio valido dopo la decodifica'); - this.clearChunks(); - return false; - } - - // Concatena i binari usando Uint8Array.set() - const totalBinaryLength = binaryChunks.reduce((acc, chunk) => acc + chunk.length, 0); - const totalBinaryData = new Uint8Array(totalBinaryLength); - let offset = 0; - - binaryChunks.forEach((chunk) => { - totalBinaryData.set(chunk, offset); - offset += chunk.length; - }); - - const detectedFormat = this.detectAudioFormat(totalBinaryData); - const extension = detectedFormat === 'unknown' ? 'm4a' : detectedFormat; - if (detectedFormat === 'unknown') { - console.warn('AudioPlayer: Formato audio non rilevato, uso fallback .m4a'); - } else { - console.log(`AudioPlayer: Formato audio rilevato -> ${detectedFormat}`); - } - - const finalAudioPath = `${FileSystem.documentDirectory}final_audio_${Date.now()}.${extension}`; - const completeAudioBase64 = this.bytesToBase64(totalBinaryData); - - console.log('AudioPlayer: Audio concatenato pronto:'); - console.log(` - Chunks elaborati: ${binaryChunks.length}`); - console.log(` - Dimensione binaria: ${totalBinaryData.length} bytes`); - console.log(` - Dimensione base64: ${completeAudioBase64.length} caratteri`); - console.log(` - Salvataggio file: ${finalAudioPath.split('/').pop()}`); - - await FileSystem.writeAsStringAsync(finalAudioPath, completeAudioBase64, { - encoding: FileSystem.EncodingType.Base64, - }); - - this.clearChunks(); - - console.log('AudioPlayer: Riproduzione file audio...'); - - try { - await Audio.setAudioModeAsync({ - allowsRecordingIOS: false, - playsInSilentModeIOS: true, - staysActiveInBackground: true, - shouldDuckAndroid: true, - playThroughEarpieceAndroid: false, - }); - - const { sound } = await 
Audio.Sound.createAsync({ uri: finalAudioPath }); - this.currentSound = sound; - - this.currentSound.setOnPlaybackStatusUpdate((status) => { - if (status.isLoaded && status.didJustFinish) { - console.log('AudioPlayer: Riproduzione completata'); - this.onPlaybackComplete(onComplete, finalAudioPath); - } - }); - - await this.currentSound.playAsync(); - this.isPlaying = true; - console.log('AudioPlayer: Riproduzione audio iniziata'); - return true; - } catch (error) { - console.error('AudioPlayer: Errore riproduzione:', error); - - try { - await FileSystem.deleteAsync(finalAudioPath); - } catch { - console.warn('AudioPlayer: Errore eliminazione file temporaneo'); - } - return false; - } + console.log('AudioPlayer: Riproduzione WAV avviata'); + return true; } catch (error) { - console.error('AudioPlayer: Errore concatenazione:', error); + console.error('AudioPlayer: Errore riproduzione PCM16:', error); this.clearChunks(); return false; } @@ -992,88 +396,56 @@ export class AudioPlayer { /** * Svuota i chunk accumulati - */ clearChunks(): void { this.chunkBuffer = []; this.seenChunkIndexes.clear(); this.highestIndexedChunk = -1; - - // Reset timing per prossimo ciclo - this.lastChunkReceivedTime = 0; - this.chunkArrivalTimes = []; - this.bufferStartTime = 0; - this.isBufferingStarted = false; - - console.log('🔊 Chunks svuotati e timing reset'); } - /** - * Gestisce il completamento della riproduzione - */ private async onPlaybackComplete(onComplete?: () => void, audioFilePath?: string): Promise { if (this.currentSound) { try { await this.currentSound.unloadAsync(); } catch (error) { - console.error('🔊 Errore cleanup audio:', error); + console.error('Errore cleanup audio:', error); } this.currentSound = null; } - // Pulisci il file audio temporaneo if (audioFilePath) { try { await FileSystem.deleteAsync(audioFilePath); - console.log('🔊 File audio temporaneo eliminato'); } catch { - console.warn('🔊 Errore eliminazione file audio temporaneo'); + // Ignora errore 
eliminazione file temp } } this.isPlaying = false; - - if (onComplete) { - onComplete(); - } + onComplete?.(); } - /** - * Ferma la riproduzione corrente - */ async stopPlayback(): Promise<void> { if (this.currentSound) { try { await this.currentSound.stopAsync(); await this.currentSound.unloadAsync(); - console.log('🔊 Riproduzione fermata'); } catch (error) { console.error('Errore stop riproduzione:', error); } this.currentSound = null; } - this.isPlaying = false; - this.onCompleteCallback = null; } - /** - * Controlla se la riproduzione è attiva - */ isCurrentlyPlaying(): boolean { return this.isPlaying; } - /** - * Ottiene il numero di chunk accumulati - */ getChunksCount(): number { return this.chunkBuffer.length; } - /** - * Distrugge il player e pulisce tutte le risorse - */ async destroy(): Promise<void> { await this.stopPlayback(); this.clearChunks(); @@ -1093,7 +465,7 @@ export function arrayBufferToBase64(buffer: ArrayBuffer): string { */ export function base64ToArrayBuffer(base64: string): ArrayBuffer { const bytes = decodeBase64(base64); - return bytes.buffer; + return bytes.buffer as ArrayBuffer; } /** @@ -1116,6 +488,6 @@ export function formatDuration(milliseconds: number): string { const seconds = Math.floor(milliseconds / 1000); const minutes = Math.floor(seconds / 60); const remainingSeconds = seconds % 60; - + return `${minutes}:${remainingSeconds.toString().padStart(2, '0')}`; } diff --git a/test/test_botservice.ts b/test/test_botservice.ts index 3ef90dd..b17a749 100644 --- a/test/test_botservice.ts +++ b/test/test_botservice.ts @@ -1,4 +1,4 @@ -import { sendMessageToBot, validateMessage, formatMessage } from "../src/services/botservice"; +import { sendMessageToBot, validateMessage, formatMessage } from "../src/services/textBotService"; /** * Script di test TypeScript per sendMessageToBot diff --git a/voice_bot_websocket_api.md b/voice_bot_websocket_api.md new file mode 100644 index 0000000..5a7b980 --- /dev/null +++ b/voice_bot_websocket_api.md @@ -0,0 
+1,375 @@ +# Voice Bot WebSocket API Documentation + +## Endpoint + +``` +WS /chat/voice-bot-websocket +``` + +WebSocket endpoint for real-time voice interaction with the AI assistant. Uses the OpenAI Realtime API with MCP tools, handling transcription, AI reasoning, tool execution, and TTS natively — no separate Whisper or TTS calls needed. + +--- + +## Connection Flow + +``` +Client Server + | | + |--- WebSocket connect --------->| + |<-- connection accepted --------| + | | + |--- { type: "auth", token } --->| Phase 1: Authentication + |<-- { type: "status", | + | phase: "authenticated" } -| + | | + | (server sets up MCP + | Phase 2: Setup + | RealtimeAgent internally) | + |<-- { type: "status", | + | phase: "ready" } ---------| + | | + |<== bidirectional messages ====>| Phase 3: Conversation + | | + |<-- { type: "done" } -----------| Session ended +``` + +--- + +## Phase 1: Authentication + +The **first message** sent by the client **must** be an authentication message. Any other message type before authentication will return an error. + +### Request + +```json +{ + "type": "auth", + "token": "" +} +``` + +| Field | Type | Required | Description | +|---------|--------|----------|--------------------------------------------------------------------| +| `type` | string | yes | Must be `"auth"` | +| `token` | string | yes | JWT access token. May optionally include the `"Bearer "` prefix. | + +### Responses + +**Success:** + +```json +{ + "type": "status", + "phase": "authenticated", + "message": "Autenticato come (ID: )" +} +``` + +**Error — missing token:** + +```json +{ + "type": "error", + "message": "Token di autenticazione richiesto" +} +``` + +**Error — invalid token:** + +```json +{ + "type": "error", + "message": "Token non valido: " +} +``` + +**Error — user not found:** + +```json +{ + "type": "error", + "message": "Utente non trovato" +} +``` + +> After a failed auth attempt the connection remains open and the client can retry. 
+ +--- + +## Phase 2: Setup (automatic) + +After successful authentication the server: + +1. Reads the user's `voice_gender` preference (`"female"` or `"male"`) and selects the corresponding voice (`coral` / `echo`). +2. Connects to the MCP server with a scoped JWT. +3. Creates a `RealtimeAgent` with MCP tools and the system prompt. +4. Opens the OpenAI Realtime session configured with: + - Audio format: PCM16 + - Transcription model: `gpt-4o-mini-transcribe` (language: `it`) + - Turn detection: semantic VAD with interrupt support + +When ready the server sends: + +```json +{ + "type": "status", + "phase": "ready", + "message": "Sessione vocale pronta" +} +``` + +The client should **wait for this message** before sending audio or text. + +--- + +## Phase 3: Conversation + +### Client -> Server messages + +#### Send text + +```json +{ + "type": "text", + "content": "Quali task ho per oggi?" +} +``` + +| Field | Type | Required | Description | +|-----------|--------|----------|------------------------| +| `type` | string | yes | `"text"` | +| `content` | string | yes | Plain text message | + +#### Send audio + +Stream audio data in chunks. Audio must be **PCM16** encoded. + +```json +{ + "type": "audio", + "data": "" +} +``` + +| Field | Type | Required | Description | +|--------|--------|----------|------------------------------------| +| `type` | string | yes | `"audio"` | +| `data` | string | yes | Base64-encoded PCM16 audio bytes | + +#### Commit audio buffer + +Signal the end of an audio utterance to trigger processing: + +```json +{ + "type": "audio_commit" +} +``` + +> Only needed if the client wants to explicitly commit; otherwise semantic VAD handles turn detection automatically. + +#### Interrupt + +Cancel the current assistant response (e.g. 
when the user starts speaking): + +```json +{ + "type": "interrupt" +} +``` + +--- + +### Server -> Client messages + +#### Status updates + +```json +{ + "type": "status", + "phase": "", + "message": "" +} +``` + +| `phase` | Meaning | +|------------------|--------------------------------------| +| `authenticated` | Auth succeeded | +| `ready` | Session ready for input | +| `interrupted` | Audio playback interrupted | +| `audio_end` | Assistant finished speaking | +| `agent_start` | Agent started processing | +| `agent_end` | Agent finished processing | + +#### Audio response + +```json +{ + "type": "audio", + "data": "", + "chunk_index": 0 +} +``` + +| Field | Type | Description | +|---------------|--------|----------------------------------------------| +| `data` | string | Base64-encoded PCM16 audio chunk | +| `chunk_index` | int | Sequential index (resets to 0 each turn) | + +The client should decode and play these chunks sequentially. + +#### Transcript + +```json +{ + "type": "transcript", + "role": "user" | "assistant", + "content": "Hai 3 task per oggi..." 
+} +``` + +| Field | Type | Description | +|-----------|--------|----------------------------------------------| +| `role` | string | `"user"` (transcribed speech) or `"assistant"` (generated text) | +| `content` | string | Transcript text | + +#### Tool execution + +**Start:** + +```json +{ + "type": "tool_start", + "tool_name": "get_tasks", + "arguments": "{\"date\": \"2026-01-29\"}" +} +``` + +**End:** + +```json +{ + "type": "tool_end", + "tool_name": "get_tasks", + "output": "[{\"title\": \"Meeting\", ...}]" +} +``` + +#### Error + +```json +{ + "type": "error", + "message": "" +} +``` + +#### Done + +Sent when the session ends cleanly: + +```json +{ + "type": "done" +} +``` + +--- + +## Audio Format + +| Parameter | Value | +|--------------|-----------------| +| Encoding | PCM16 (signed 16-bit little-endian) | +| Sample rate | 24000 Hz | +| Channels | 1 (mono) | +| Transport | Base64 over JSON | + +--- + +## Example Client (JavaScript) + +```javascript +const ws = new WebSocket("wss://api.mytasklyapp.com/chat/voice-bot-websocket"); + +ws.onopen = () => { + // Step 1: Authenticate + ws.send(JSON.stringify({ + type: "auth", + token: accessToken + })); +}; + +ws.onmessage = (event) => { + const msg = JSON.parse(event.data); + + switch (msg.type) { + case "status": + if (msg.phase === "ready") { + // Session ready — can now send audio/text + startRecording(); + } + break; + + case "audio": + // Decode and enqueue for playback + const pcm = base64ToArrayBuffer(msg.data); + audioPlayer.enqueue(pcm); + break; + + case "transcript": + console.log(`[${msg.role}]: ${msg.content}`); + break; + + case "tool_start": + console.log(`Calling tool: ${msg.tool_name}`); + break; + + case "tool_end": + console.log(`Tool result: ${msg.output}`); + break; + + case "error": + console.error("Server error:", msg.message); + break; + + case "done": + ws.close(); + break; + } +}; + +// Send audio chunk from microphone +function onAudioChunk(pcm16Buffer) { + const b64 = 
arrayBufferToBase64(pcm16Buffer); + ws.send(JSON.stringify({ type: "audio", data: b64 })); +} + +// Interrupt assistant +function onUserStartsSpeaking() { + ws.send(JSON.stringify({ type: "interrupt" })); +} +``` + +--- + +## Error Handling + +| Scenario | Behavior | +|-----------------------------|-------------------------------------------------------| +| No auth message first | Server responds with error, connection stays open | +| Invalid/expired token | Server responds with error, client can retry auth | +| User not found in DB | Server responds with error, client can retry auth | +| MCP server unreachable | Server sends error and closes the connection | +| OpenAI session failure | Server sends error and closes the connection | +| Client disconnects | Server cleans up MCP and Realtime session gracefully | +| Unknown message type | Server responds with error, connection stays open | + +--- + +## Notes + +- The voice used by the assistant depends on the user's `voice_gender` setting in the database (`"female"` -> `coral`, `"male"` -> `echo`). +- Turn detection uses **semantic VAD** — the server automatically detects when the user stops speaking. Manual `audio_commit` is optional. +- The `interrupt` message cancels the current assistant response, useful for barge-in scenarios. +- MCP tools (task management, calendar, etc.) are available to the voice assistant and execute automatically when needed. +- This endpoint does **not** require the `X-API-Key` header (authentication is handled via the WebSocket auth message).