diff --git a/.github/workflows/build_pat.yaml b/.github/workflows/build_pat.yaml index 02c6a2e1..dc9806d7 100755 --- a/.github/workflows/build_pat.yaml +++ b/.github/workflows/build_pat.yaml @@ -19,19 +19,24 @@ jobs: # fail-fast: Default is true, switch to false to allow one platform to fail and still run others fail-fast: false matrix: - name: [Ubuntu, macOS, Windows_2022] + name: [Ubuntu, macOS-Intel, macOS-ARM, Windows_2022] include: - name: Ubuntu os: ubuntu-22.04 node-version: 18 allow_failure: false - - name: macOS + - name: macOS-Intel os: macos-13 node-version: 18 allow_failure: false + arch: x86_64 MACOSX_DEPLOYMENT_TARGET: 10.15 - SDKROOT: /Applications/Xcode_11.7.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk - DEVELOPER_DIR: /Applications/Xcode_13.2.1.app/Contents/Developer + - name: macOS-ARM + os: macos-14 + node-version: 18 + allow_failure: false + arch: arm64 + MACOSX_DEPLOYMENT_TARGET: 12.1 - name: Windows_2022 os: windows-2022 node-version: 18 @@ -54,19 +59,26 @@ jobs: sudo apt update sudo apt install cmake elif [ "$RUNNER_OS" == "macOS" ]; then - curl -L -O https://download.qt.io/archive/qt-installer-framework/4.3.0/QtInstallerFramework-macOS-x64-4.3.0.dmg - hdiutil attach -mountpoint ./qtfiw_installer QtInstallerFramework-macOS-x64-4.3.0.dmg - echo "ls ./qtfiw_installer" - sudo ./qtfiw_installer/QtInstallerFramework-macOS-x64-4.3.0.app/Contents/MacOS/QtInstallerFramework-macOS-x64-4.3.0 --verbose --script ./ci/install_script_qtifw.qs - ls ~/Qt/QtIFW-4.3.0 || true - echo "~/Qt/QtIFW-4.3.0/bin/" >> $GITHUB_PATH + # Install Qt Installer Framework using aqtinstall + python3 -m pip install --break-system-packages aqtinstall + python3 -m aqt install-tool -O "${{ github.workspace }}/Qt/" mac desktop tools_ifw + + # Add Qt IFW to PATH + QT_IFW_DIR=$(find "${{ github.workspace }}/Qt/Tools/QtInstallerFramework" -name "bin" -type d | head -1) + if [ -n "$QT_IFW_DIR" ]; then + echo "$QT_IFW_DIR" >> $GITHUB_PATH + fi echo MACOSX_DEPLOYMENT_TARGET=${{ matrix.MACOSX_DEPLOYMENT_TARGET }} >> $GITHUB_ENV - # echo CMAKE_MACOSX_DEPLOYMENT_TARGET='-DCMAKE_OSX_DEPLOYMENT_TARGET=$MACOSX_DEPLOYMENT_TARGET' >> $GITHUB_ENV elif [ "$RUNNER_OS" == "Windows" ]; then - curl -L -O https://download.qt.io/archive/qt-installer-framework/4.3.0/QtInstallerFramework-windows-x86-4.3.0.exe - ./QtInstallerFramework-windows-x86-4.3.0.exe --verbose --script ./ci/install_script_qtifw.qs - dir "C:/Qt/" - echo "C:/Qt/QtIFW-4.3.0/bin" >> $GITHUB_PATH + # Install Qt Installer Framework using aqtinstall + python3 -m pip install --break-system-packages aqtinstall + python3 -m aqt install-tool -O "${{ github.workspace }}/Qt/" windows desktop tools_ifw + + # Add Qt IFW to PATH + QT_IFW_DIR=$(find "${{ github.workspace }}/Qt/Tools/QtInstallerFramework" -name "bin" -type d | head -1) + if [ -n "$QT_IFW_DIR" ]; then + echo "$QT_IFW_DIR" >> $GITHUB_PATH + fi #echo "Setting CMAKE_GENERATOR options equivalent to ='-G \"Visual Studio 16 2019\" -A x64'" #echo CMAKE_GENERATOR='Visual Studio 16 2019' >> $GITHUB_ENV #echo CMAKE_GENERATOR_PLATFORM=x64 >> $GITHUB_ENV @@ -88,15 +100,16 @@ jobs: run: cmake -E make_directory ./build/ + + - name: Configure CMake & build (Windows) working-directory: ./build if: runner.os == 'Windows' shell: cmd run: | - echo "Using vcvarsall to initialize the development environment" call vcvarsall.bat x64 cmake -G "Visual Studio 17 2022" -A x64 .. - cmake --build . --target package -j ${{ env.N }} --config Release + cmake --build . 
--target package -j %N% --config Release - name: Configure CMake & build (Linux) working-directory: ./build @@ -120,10 +133,21 @@ jobs: shell: bash run: | set -x - cmake -DCMAKE_OSX_DEPLOYMENT_TARGET=11 \ - -DCMAKE_OSX_ARCHITECTURES="arm64;x86_64" \ - -DCMAKE_BUILD_TYPE=Release \ - ../ + if [ "${{ matrix.arch }}" = "arm64" ]; then + export MATRIX_ARCH=arm64 + echo "MATRIX_ARCH=arm64" >> $GITHUB_ENV + cmake -DCMAKE_OSX_ARCHITECTURES="arm64" \ + -DCMAKE_OSX_DEPLOYMENT_TARGET="${{ matrix.MACOSX_DEPLOYMENT_TARGET }}" \ + -DCMAKE_BUILD_TYPE=Release \ + ../ + else + export MATRIX_ARCH=x86_64 + echo "MATRIX_ARCH=x86_64" >> $GITHUB_ENV + cmake -DCMAKE_OSX_ARCHITECTURES="x86_64" \ + -DCMAKE_OSX_DEPLOYMENT_TARGET="${{ matrix.MACOSX_DEPLOYMENT_TARGET }}" \ + -DCMAKE_BUILD_TYPE=Release \ + ../ + fi cmake --build . --target package -j $N - name: Save artifact diff --git a/.gitignore b/.gitignore index 92ce9367..6b306b0c 100644 --- a/.gitignore +++ b/.gitignore @@ -16,3 +16,14 @@ Thumbs.db /playwright-results/ /playwright/.cache/ /reports/ + +# Claude Code and Claude Flow +/.claude/ +/.hive-mind/ +/.swarm/ +/memory/ +claude-flow +claude-flow.bat +claude-flow.ps1 +hive-mind-prompt-*.txt +CLAUDE.md diff --git a/CMakeLists.txt b/CMakeLists.txt index 51a4cd62..498005e1 100755 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -110,7 +110,25 @@ set(CPACK_PACKAGE_VERSION_MINOR ${PROJECT_VERSION_MINOR}) set(CPACK_PACKAGE_VERSION_PATCH ${PROJECT_VERSION_PATCH}) set(CPACK_PACKAGE_DESCRIPTION_SUMMARY "Parametric Analysis Tool") set(CPACK_DEBIAN_PACKAGE_HOMEPAGE "https://www.openstudio.net") -set(CPACK_PACKAGE_FILE_NAME "${CMAKE_PROJECT_NAME}-${PROJECT_VERSION}-${CMAKE_SYSTEM_NAME}") +# Handle architecture-specific package naming +if(CMAKE_OSX_ARCHITECTURES) + # Check if CMAKE_OSX_ARCHITECTURES contains multiple architectures (semicolon-separated) + string(FIND "${CMAKE_OSX_ARCHITECTURES}" ";" MULTI_ARCH_POS) + if(MULTI_ARCH_POS GREATER -1) + # Multiple architectures detected - use "universal" suffix + set(CPACK_PACKAGE_FILE_NAME "${CMAKE_PROJECT_NAME}-${PROJECT_VERSION}-${CMAKE_SYSTEM_NAME}-universal") + elseif(CMAKE_OSX_ARCHITECTURES STREQUAL "arm64") + set(CPACK_PACKAGE_FILE_NAME "${CMAKE_PROJECT_NAME}-${PROJECT_VERSION}-${CMAKE_SYSTEM_NAME}-arm64") + elseif(CMAKE_OSX_ARCHITECTURES STREQUAL "x86_64") + set(CPACK_PACKAGE_FILE_NAME "${CMAKE_PROJECT_NAME}-${PROJECT_VERSION}-${CMAKE_SYSTEM_NAME}-x86_64") + else() + # Single architecture but not arm64/x86_64 - use the architecture name + set(CPACK_PACKAGE_FILE_NAME "${CMAKE_PROJECT_NAME}-${PROJECT_VERSION}-${CMAKE_SYSTEM_NAME}-${CMAKE_OSX_ARCHITECTURES}") + endif() +else() + # No architecture specified - use default naming + set(CPACK_PACKAGE_FILE_NAME "${CMAKE_PROJECT_NAME}-${PROJECT_VERSION}-${CMAKE_SYSTEM_NAME}") +endif() set(CPACK_PACKAGE_CONTACT "openstudio@nrel.gov") include(CPack) diff --git a/README.md b/README.md index 4ff745d3..7c2f2f1b 100644 --- a/README.md +++ b/README.md @@ -86,9 +86,15 @@ We tested our Linux-specific instructions on Ubuntu 22.04, but they should also ``` * Run the appropriate command to generate the files. 
* MacOS: - ``` - cmake -DCMAKE_OSX_DEPLOYMENT_TARGET=11 -DCMAKE_OSX_ARCHITECTURES="arm64;x86_64" -DCMAKE_BUILD_TYPE=Release ../ - ``` + **Note:** For ARM64 (Apple Silicon) specific builds, use: + ``` + cmake -DCMAKE_OSX_DEPLOYMENT_TARGET=12.1 -DCMAKE_OSX_ARCHITECTURES="arm64" -DCMAKE_BUILD_TYPE=Release ../ + ``` + + For Intel-specific builds, use: + ``` + cmake -DCMAKE_OSX_DEPLOYMENT_TARGET=10.15 -DCMAKE_OSX_ARCHITECTURES="x86_64" -DCMAKE_BUILD_TYPE=Release ../ + ``` * Windows: ``` cmake -G "Visual Studio 17 2022" -A x64 ../ @@ -120,6 +126,7 @@ We tested our Linux-specific instructions on Ubuntu 22.04, but they should also ``` 7. The installer package should now be ready to use. - * MacOS: `./build/ParametricAnalysisTool-x.x.x-Darwin.dmg` + * MacOS ARM64: `./build/ParametricAnalysisTool-x.x.x-Darwin-arm64.dmg` + * MacOS Intel: `./build/ParametricAnalysisTool-x.x.x-Darwin-x86_64.dmg` * Windows: `./build/ParametricAnalysisTool-x.x.x-Windows.exe` * Linux: `./build/ParametricAnalysisTool-x.x.x-Linux.deb` \ No newline at end of file diff --git a/manifest.json b/manifest.json index d375db7f..212cc307 100644 --- a/manifest.json +++ b/manifest.json @@ -10,6 +10,11 @@ "platform": "darwin", "arch": "x64", "type": "energyplus" + }, { + "name": "EnergyPlus-25.1.0-68a4a7c774-Darwin-macOS13-arm64.tar.gz", + "platform": "darwin", + "arch": "arm64", + "type": "energyplus" }, { "name": "EnergyPlus-25.1.0-linux.tar.gz", "platform": "linux", @@ -26,6 +31,11 @@ "platform": "darwin", "arch": "x64", "type": "ruby" + }, { + "name": "ruby-3.2.2-darwin-arm64.tar.gz", + "platform": "darwin", + "arch": "arm64", + "type": "ruby" }, { "name": "ruby-3.2.2-linux.tar.gz", "platform": "linux", @@ -42,6 +52,11 @@ "platform": "darwin", "arch": "x64", "type": "mongo" + }, { + "name": "mongodb-macos-arm64-6.0.8.tgz", + "platform": "darwin", + "arch": "arm64", + "type": "mongo" }, { "name": "mongodb-6.0.8-linux.tar.gz", "platform": "linux", @@ -58,6 +73,11 @@ "platform": "darwin", "arch": "x64", "type": "openstudio" + }, { + "name": "OpenStudio-3.10.0+86d7e215a1-Darwin-arm64.tar.gz", + "platform": "darwin", + "arch": "arm64", + "type": "openstudio" }, { "name": "OpenStudio-3.10.0-Linux.tar.gz", "platform": "linux", @@ -74,6 +94,11 @@ "platform": "darwin", "arch": "x64", "type": "OpenStudio-server" + }, { + "name": "OpenStudio-server-5873e0d21d-darwin-arm64.tar.gz", + "platform": "darwin", + "arch": "arm64", + "type": "OpenStudio-server" }, { "name": "OpenStudio-server-5873e0d21d-linux.tar.gz", "platform": "linux", diff --git a/tasks/build.js b/tasks/build.js index 775fdffc..2c5bfa08 100755 --- a/tasks/build.js +++ b/tasks/build.js @@ -1,265 +1,372 @@ -'use strict'; - -var path = require('path'); -var gulp = require('gulp'); -var request = require('request'); -var progress = require('request-progress'); -var source = require('vinyl-source-stream'); -var jetpack = require('fs-jetpack'); -var conf = require('./conf'); -var utils = require('./utils'); -var _ = require('lodash'); -var os = require('os'); -var zlib = require('zlib'); -var tar = require('tar-fs'); -var gulpClean = require('gulp-clean'); -var merge = require('merge-stream'); -var rename = require('gulp-rename'); - -const { inject } = require('./inject'); -const { scripts } = require('./scripts'); - -var $ = require('gulp-load-plugins')({ - pattern: ['gulp-*', 'lazypipe', 'streamify'] -}); - -async function background() { - return gulp.src(path.join(conf.paths.tmp, '/serve/app/background.js')) - .pipe($.uglify()).on('error', await conf.errorHandler('Uglify 
background.js')) - .pipe($.flatten()) - .pipe(gulp.dest(path.join(conf.paths.dist, '/'))); -} - -function preload() { - return gulp.src(path.join(conf.paths.src, '/app/reports/preload.js')) - .pipe($.flatten()) - .pipe(gulp.dest(path.join(conf.paths.dist, '/scripts'))); -} - -async function finalizeHtml() { - const htmlFilter = $.filter('*.html', {restore: true}); - const jsFilter = $.filter('**/*.js', {restore: true}); - const cssFilter = $.filter('**/*.css', {restore: true}); - const notSourceMapFilter = $.filter(['**', '!*.map'], {restore: true}); - - return gulp.src(path.join(conf.paths.tmp, '/serve/*.html'), { base: conf.paths.dist }) - .pipe($.flatten()) - .pipe($.useref({}, $.lazypipe().pipe($.sourcemaps.init, {loadMaps: true}))) - .pipe(jsFilter) - .pipe($.ngAnnotate()) - .pipe($.rev()) - .pipe($.uglify()).on('error', await conf.errorHandler('Uglify')) - .pipe($.sourcemaps.write('maps')) - .pipe(jsFilter.restore) - .pipe(cssFilter) - .pipe($.replace('../node_modules/bootstrap-sass/assets/fonts/bootstrap/', '../fonts/')) - .pipe($.replace(/url\('ui-grid.(.+?)'\)/g, 'url(\'../fonts/ui-grid.$1\')')) - .pipe($.rev()) - .pipe($.csso()) - .pipe($.sourcemaps.write('maps')) - .pipe(cssFilter.restore) - .pipe(notSourceMapFilter) - .pipe($.revReplace()) - .pipe(notSourceMapFilter.restore) - .pipe(htmlFilter) - .pipe($.htmlmin({ - collapseBooleanAttributes: true, - collapseInlineTagWhitespace: true, - collapseWhitespace: true, - removeComments: true, - removeRedundantAttributes: true, - removeTagWhitespace: true - })) - .pipe(htmlFilter.restore) - .pipe(gulp.dest(path.join(conf.paths.dist, '/'))) - .pipe($.size({ - title: path.join(conf.paths.dist, '/'), - showFiles: true - })); -} - -const html = gulp.series(scripts, gulp.parallel(background, preload, inject), finalizeHtml); - -// Only applies for fonts from bootstrap-sass & angular-ui-grid modules -// Custom fonts are handled by the "other" task -function fonts() { - const NPM_FONT_DIRS = ['bootstrap-sass', 'angular-ui-grid']; - const FONT_EXTENSIONS_GLOB = '/**/*.{eot,svg,ttf,woff,woff2}'; - - return gulp.src(NPM_FONT_DIRS.map(fontDir => utils.mapNpmFilePath(`${fontDir}${FONT_EXTENSIONS_GLOB}`))) - .pipe($.flatten()) - .pipe(gulp.dest(path.join(conf.paths.dist, '/fonts/'))); -} - -function other() { - return gulp.src([ - path.join(conf.paths.src, '/**/*'), - path.join('!' + conf.paths.src, '/node_modules/**/*'), - path.join('!' + conf.paths.src, '/**/*.{html,css,js,scss}') - ]) - .pipe($.filter(file => file.stat.isFile())) - .pipe(rename(p => { - if (p.dirname.startsWith(conf.paths.src)) { - p.dirname = p.dirname.substring(conf.paths.src.length); - } - })) - .pipe(gulp.dest(path.join(conf.paths.dist, '/'))); -} - -function nodeModules() { - return gulp.src(path.join(conf.paths.src, '/node_modules/**/*'), {base: conf.paths.src}) - .pipe($.filter(file => file.stat.isFile())) - .pipe(gulp.dest(path.join(conf.paths.dist, '/'))); -} - -async function clean() { - await jetpack.removeAsync(conf.paths.dist); - await jetpack.removeAsync(conf.paths.tmp); -} - -function environment() { - var configFile = 'config/env_' + utils.getEnvName() + '.json'; - return jetpack.copyAsync(configFile, path.join(conf.paths.dist, '/env.json'), {overwrite: true}); -} - -function finalizeBuild() { - // Finalize - var manifest = jetpack.read(path.join(__dirname, '..', conf.paths.src, 'package.json'), 'json'); - - // Add "dev" or "test" suffix to name, so Electron will write all data - // like cookies and localStorage in separate places for each environment. 
- switch (utils.getEnvName()) { - case 'development': - manifest.name += '-dev'; - manifest.productName += 'Dev'; - break; - case 'test': - manifest.name += '-test'; - manifest.productName += 'Test'; - break; - } - - return jetpack.writeAsync(path.join(__dirname, '..', conf.paths.dist, 'package.json'), manifest); -} - -function copyManifest() { - jetpack.copy('manifest.json', path.join(conf.paths.dist, '/manifest.json'), {overwrite: true}); -} - -// Binary dependency management - -var argv = require('yargs').argv; - -let destination = path.join(conf.paths.dist, '..', 'depend'); -let dependencies = ['openstudio', 'energyplus', 'ruby', 'mongo', 'openstudioServer']; - -if (argv.prefix) { - destination = argv.prefix; -} - -if (argv.exclude) { - const without = argv.exclude.split(','); - dependencies = _.difference(dependencies, without); -} - -const manifest = jetpack.read('manifest.json', 'json'); - -const platform = os.platform(); -const arch = os.arch(); - -function downloadDeps() { - - // List the dependencies to download here - // These should correspond to keys in the manifest - - console.log('Dependencies: ' + dependencies.sort().join(', ')); - var tasks = dependencies.map(depend => { - const fileInfo = _.find(manifest[depend], {platform: platform}); - const fileName = fileInfo.name; - - // Note JM 2018-09-13: Allow other resources in case AWS isn't up to date - // and for easier testing of new deps - if( fileName.includes("http") ) { - // Already a URI - var uri = fileName; - var destName = fileName.replace(/^.*[\\\/]/, ''); - } else { - // Need to concat endpoint (AWS) with the fileName - var uri = manifest.endpoint + fileName; - var destName = fileName; - } - - return progress(request({uri: uri, timeout: 5000})) - .on('progress', state => { - console.log(`Downloading ${depend}, ${(state.percent * 100).toFixed(0)}%`); - }) - .pipe(source(destName)) - .pipe(gulp.dest(destination)); - }); - - return merge(tasks); -} - -function extractDeps() { - var tasks = dependencies.map(depend => { - const fileInfo = _.find(manifest[depend], {platform: platform}); - const fileName = fileInfo.name; - - if( fileName.includes("http") ) { - var destName = fileName.replace(/^.*[\\\/]/, ''); - } else { - var destName = fileName; - } - - // Note JM 2018-0913: - // Usually deps are properly zipped to that the extracted root folder - // is adequately named, but when using absolute http:// resources (not - // packaged specifically by us), we must rename to ensure it's correct - var properName = fileInfo.type; - - // What we do is to extract to properName and remove the leading (root) - // directory level - const properDestinationDir = path.join(destination, properName); - jetpack.remove(properDestinationDir); - return jetpack.createReadStream(path.join(destination, destName)) - .pipe(zlib.createGunzip()) - .pipe(tar.extract(properDestinationDir, { - strip: 1, - // There is a bug in tar-fs where, because stripped files & directories - // are given an empty header.name, having multiple stripped items - // results in having multiple headers with the same (empty) name, - // causing the extraction to either fail or hang. 
- // - // We avoid this issue by ignoring stripped items (ie, items with empty names) - ignore: (__, header) => { - return header.name.length === 0; - } - })); - }); - - const tasksAsPromises = tasks.map(task => new Promise((resolve, reject) => task.on('finish', resolve).on('error', reject))); - return Promise.all(tasksAsPromises); -} - -function cleanDeps() { - var tasks = dependencies.map(depend => { - const fileInfo = _.find(manifest[depend], {platform: platform}); - const fileName = fileInfo.name; - - if( fileName.includes("http") ) { - var destName = fileName.replace(/^.*[\\\/]/, ''); - } else { - var destName = fileName; - } - - return gulp.src(path.join(destination, fileName), {read: false}) - .pipe(gulpClean()); - }); - - return merge(tasks); -} - -exports.build = gulp.series(gulp.parallel(html, fonts, nodeModules, other, environment), finalizeBuild); -exports.clean = clean; -exports.copyManifest = copyManifest; -exports.installDeps = gulp.series(downloadDeps, extractDeps, cleanDeps); +'use strict'; + +var path = require('path'); +var gulp = require('gulp'); +var request = require('request'); +var progress = require('request-progress'); +var source = require('vinyl-source-stream'); +var jetpack = require('fs-jetpack'); +var conf = require('./conf'); +var utils = require('./utils'); +var _ = require('lodash'); +var os = require('os'); +var zlib = require('zlib'); +var tar = require('tar-fs'); +var gulpClean = require('gulp-clean'); +var merge = require('merge-stream'); +var rename = require('gulp-rename'); + +const { inject } = require('./inject'); +const { scripts } = require('./scripts'); + +var $ = require('gulp-load-plugins')({ + pattern: ['gulp-*', 'lazypipe', 'streamify'] +}); + +async function background() { + return gulp.src(path.join(conf.paths.tmp, '/serve/app/background.js')) + .pipe($.uglify()).on('error', await conf.errorHandler('Uglify background.js')) + .pipe($.flatten()) + .pipe(gulp.dest(path.join(conf.paths.dist, '/'))); +} + +function preload() { + return gulp.src(path.join(conf.paths.src, '/app/reports/preload.js')) + .pipe($.flatten()) + .pipe(gulp.dest(path.join(conf.paths.dist, '/scripts'))); +} + +async function finalizeHtml() { + const htmlFilter = $.filter('*.html', {restore: true}); + const jsFilter = $.filter('**/*.js', {restore: true}); + const cssFilter = $.filter('**/*.css', {restore: true}); + const notSourceMapFilter = $.filter(['**', '!*.map'], {restore: true}); + + return gulp.src(path.join(conf.paths.tmp, '/serve/*.html'), { base: conf.paths.dist }) + .pipe($.flatten()) + .pipe($.useref({}, $.lazypipe().pipe($.sourcemaps.init, {loadMaps: true}))) + .pipe(jsFilter) + .pipe($.ngAnnotate()) + .pipe($.rev()) + .pipe($.uglify()).on('error', await conf.errorHandler('Uglify')) + .pipe($.sourcemaps.write('maps')) + .pipe(jsFilter.restore) + .pipe(cssFilter) + .pipe($.replace('../node_modules/bootstrap-sass/assets/fonts/bootstrap/', '../fonts/')) + .pipe($.replace(/url\('ui-grid.(.+?)'\)/g, 'url(\'../fonts/ui-grid.$1\')')) + .pipe($.rev()) + .pipe($.csso()) + .pipe($.sourcemaps.write('maps')) + .pipe(cssFilter.restore) + .pipe(notSourceMapFilter) + .pipe($.revReplace()) + .pipe(notSourceMapFilter.restore) + .pipe(htmlFilter) + .pipe($.htmlmin({ + collapseBooleanAttributes: true, + collapseInlineTagWhitespace: true, + collapseWhitespace: true, + removeComments: true, + removeRedundantAttributes: true, + removeTagWhitespace: true + })) + .pipe(htmlFilter.restore) + .pipe(gulp.dest(path.join(conf.paths.dist, '/'))) + .pipe($.size({ + title: 
path.join(conf.paths.dist, '/'), + showFiles: true + })); +} + +const html = gulp.series(scripts, gulp.parallel(background, preload, inject), finalizeHtml); + +// Only applies for fonts from bootstrap-sass & angular-ui-grid modules +// Custom fonts are handled by the "other" task +function fonts() { + const NPM_FONT_DIRS = ['bootstrap-sass', 'angular-ui-grid']; + const FONT_EXTENSIONS_GLOB = '/**/*.{eot,svg,ttf,woff,woff2}'; + + return gulp.src(NPM_FONT_DIRS.map(fontDir => utils.mapNpmFilePath(`${fontDir}${FONT_EXTENSIONS_GLOB}`))) + .pipe($.flatten()) + .pipe(gulp.dest(path.join(conf.paths.dist, '/fonts/'))); +} + +function other() { + return gulp.src([ + path.join(conf.paths.src, '/**/*'), + path.join('!' + conf.paths.src, '/node_modules/**/*'), + path.join('!' + conf.paths.src, '/**/*.{html,css,js,scss}') + ]) + .pipe($.filter(file => file.stat.isFile())) + .pipe(rename(p => { + if (p.dirname.startsWith(conf.paths.src)) { + p.dirname = p.dirname.substring(conf.paths.src.length); + } + })) + .pipe(gulp.dest(path.join(conf.paths.dist, '/'))); +} + +function nodeModules() { + return gulp.src(path.join(conf.paths.src, '/node_modules/**/*'), {base: conf.paths.src}) + .pipe($.filter(file => file.stat.isFile())) + .pipe(gulp.dest(path.join(conf.paths.dist, '/'))); +} + +async function clean() { + await jetpack.removeAsync(conf.paths.dist); + await jetpack.removeAsync(conf.paths.tmp); +} + +function environment() { + var configFile = 'config/env_' + utils.getEnvName() + '.json'; + return jetpack.copyAsync(configFile, path.join(conf.paths.dist, '/env.json'), {overwrite: true}); +} + +function finalizeBuild() { + // Finalize + var manifest = jetpack.read(path.join(__dirname, '..', conf.paths.src, 'package.json'), 'json'); + + // Add "dev" or "test" suffix to name, so Electron will write all data + // like cookies and localStorage in separate places for each environment. + switch (utils.getEnvName()) { + case 'development': + manifest.name += '-dev'; + manifest.productName += 'Dev'; + break; + case 'test': + manifest.name += '-test'; + manifest.productName += 'Test'; + break; + } + + return jetpack.writeAsync(path.join(__dirname, '..', conf.paths.dist, 'package.json'), manifest); +} + +function copyManifest() { + jetpack.copy('manifest.json', path.join(conf.paths.dist, '/manifest.json'), {overwrite: true}); +} + +// Binary dependency management + +var argv = require('yargs').argv; + +let destination = path.join(conf.paths.dist, '..', 'depend'); +let dependencies = ['openstudio', 'energyplus', 'ruby', 'mongo', 'openstudioServer']; + +if (argv.prefix) { + destination = argv.prefix; +} + +if (argv.exclude) { + const without = argv.exclude.split(','); + dependencies = _.difference(dependencies, without); +} + +const manifest = jetpack.read('manifest.json', 'json'); + +const platform = os.platform(); +// Priority: MATRIX_ARCH (set by workflow) > CMAKE_OSX_ARCHITECTURES > os.arch() +const cmakeArch = process.env.CMAKE_OSX_ARCHITECTURES ? 
process.env.CMAKE_OSX_ARCHITECTURES.split(';')[0] : null; +const arch = process.env.MATRIX_ARCH || cmakeArch || os.arch(); + +// Helper function for manifest lookup with arch fallback +function getActualFileInfo(manifest, depend, platform, arch) { + const fileInfo = _.find(manifest[depend], {platform, arch}); + if (fileInfo) return fileInfo; + console.warn(`No dependency found for ${depend} on ${platform}/${arch}, falling back to x64`); + const fallback = _.find(manifest[depend], {platform, arch: 'x64'}); + if (!fallback) throw new Error(`No dependency found for ${depend} on ${platform}`); + return fallback; +} + +// Retry wrapper for critical operations +function withRetry(operation, maxRetries = 3, delay = 1000) { + return new Promise((resolve, reject) => { + let attempts = 0; + + function attempt() { + attempts++; + operation() + .then(resolve) + .catch(err => { + if (attempts >= maxRetries) { + console.error(`Operation failed after ${maxRetries} attempts:`, err.message); + reject(err); + } else { + console.warn(`Attempt ${attempts} failed, retrying in ${delay}ms...`); + setTimeout(attempt, delay); + } + }); + } + + attempt(); + }); +} + +function downloadDeps() { + + // List the dependencies to download here + // These should correspond to keys in the manifest + // Enhanced with better error handling, logging, and progress reporting + + console.log('Dependencies: ' + dependencies.sort().join(', ')); + console.log(`Platform: ${platform}, Architecture: ${arch}`); + console.log(`Destination: ${destination}`); + + var tasks = dependencies.map(depend => { + const fileInfo = getActualFileInfo(manifest, depend, platform, arch); + const fileName = fileInfo.name; + + // Note JM 2018-09-13: Allow other resources in case AWS isn't up to date + // and for easier testing of new deps + if( fileName.includes("http") ) { + // Already a URI + var uri = fileName; + var destName = fileName.replace(/^.*[\\/]/, ''); + } else { + // Need to concat endpoint (AWS) with the fileName + var uri = manifest.endpoint + fileName; + var destName = fileName; + } + + console.log(`Downloading ${depend}: ${uri}`); + + const requestOptions = { + uri: uri, + timeout: 30000, // Increased timeout to 30 seconds + headers: { + 'User-Agent': 'OpenStudio-PAT-Builder/1.0' + } + }; + + return progress(request(requestOptions)) + .on('progress', state => { + const percent = (state.percent * 100).toFixed(1); + const speed = state.speed ? `(${(state.speed / 1024 / 1024).toFixed(2)} MB/s)` : ''; + console.log(`Downloading ${depend}: ${percent}% ${speed}`); + }) + .on('error', err => { + console.error(`Error downloading ${depend} from ${uri}:`, err.message); + if (err.code === 'ETIMEDOUT') { + console.error(`Download timeout for ${depend}. Check network connection or try again.`); + } else if (err.code === 'ENOTFOUND') { + console.error(`Host not found for ${depend}. 
Check the URL: ${uri}`); + } + }) + .on('response', response => { + console.log(`Response for ${depend}: ${response.statusCode} ${response.statusMessage}`); + if (response.statusCode !== 200) { + console.error(`HTTP ${response.statusCode} for ${depend}: ${uri}`); + } + if (response.headers['content-length']) { + const sizeMB = (parseInt(response.headers['content-length']) / 1024 / 1024).toFixed(2); + console.log(`Expected size for ${depend}: ${sizeMB} MB`); + } + }) + .pipe(source(destName)) + .pipe(gulp.dest(destination)); + }); + + return merge(tasks); +} + +function extractDeps() { + var tasks = dependencies.map(depend => { + const fileInfo = getActualFileInfo(manifest, depend, platform, arch); + const fileName = fileInfo.name; + + if( fileName.includes("http") ) { + var destName = fileName.replace(/^.*[\\/]/, ''); + } else { + var destName = fileName; + } + + // File integrity check before extraction + const filePath = path.join(destination, destName); + const fileStats = jetpack.inspect(filePath, { checksum: 'md5' }); + + if (!fileStats) { + throw new Error(`Downloaded file not found: ${filePath}`); + } + + console.log(`Extracting ${depend}:`); + console.log(` File: ${destName}`); + console.log(` Size: ${(fileStats.size / 1024 / 1024).toFixed(2)} MB`); + console.log(` MD5: ${fileStats.md5}`); + + // Basic size validation (warn if file is suspiciously small) + if (fileStats.size < 1024) { // Less than 1KB + console.warn(`Warning: ${depend} file is very small (${fileStats.size} bytes). This may indicate a download error.`); + } + + // Note JM 2018-0913: + // Usually deps are properly zipped to that the extracted root folder + // is adequately named, but when using absolute http:// resources (not + // packaged specifically by us), we must rename to ensure it's correct + var properName = fileInfo.type; + + // What we do is to extract to properName and remove the leading (root) + // directory level + const properDestinationDir = path.join(destination, properName); + jetpack.remove(properDestinationDir); + + console.log(` Extracting to: ${properDestinationDir}`); + + return jetpack.createReadStream(filePath) + .on('error', err => { + console.error(`Error reading ${depend} file: ${err.message}`); + throw err; + }) + .pipe(zlib.createGunzip()) + .on('error', err => { + console.error(`Error decompressing ${depend}: ${err.message}`); + console.error(`This may indicate file corruption. Try re-downloading the dependency.`); + throw err; + }) + .pipe(tar.extract(properDestinationDir, { + strip: 1, + // There is a bug in tar-fs where, because stripped files & directories + // are given an empty header.name, having multiple stripped items + // results in having multiple headers with the same (empty) name, + // causing the extraction to either fail or hang. 
+ // + // We avoid this issue by ignoring stripped items (ie, items with empty names) + ignore: (__, header) => { + return header.name.length === 0; + } + })) + .on('error', err => { + console.error(`Error extracting ${depend}: ${err.message}`); + throw err; + }) + .on('finish', () => { + console.log(`Successfully extracted ${depend}`); + }); + }); + + const tasksAsPromises = tasks.map(task => new Promise((resolve, reject) => task.on('finish', resolve).on('error', reject))); + return Promise.all(tasksAsPromises); +} + +function cleanDeps() { + var tasks = dependencies.map(depend => { + const fileInfo = getActualFileInfo(manifest, depend, platform, arch); + const fileName = fileInfo.name; + + if( fileName.includes("http") ) { + var destName = fileName.replace(/^.*[\\/]/, ''); + } else { + var destName = fileName; + } + + return gulp.src(path.join(destination, destName), {read: false}) + .pipe(gulpClean()); + }); + + return merge(tasks); +} + +exports.build = gulp.series(gulp.parallel(html, fonts, nodeModules, other, environment), finalizeBuild); +exports.clean = clean; +exports.copyManifest = copyManifest; +exports.installDeps = gulp.series(downloadDeps, extractDeps, cleanDeps);