From 077c9aaef3076dda5fe115756d6f754f39919a73 Mon Sep 17 00:00:00 2001 From: Li Haoyi Date: Tue, 18 Mar 2025 16:55:21 +0800 Subject: [PATCH 1/2] squash --- .github/workflows/publish-artifacts.yml | 39 + .github/workflows/run-tests.yml | 65 ++ .mill-version | 3 +- .scalafmt.conf | 5 +- Readme.adoc | 857 +++++++++++++----- build.mill | 225 +++++ build.sc | 9 +- mill | 6 +- os/src-2/Macros.scala | 83 ++ os/src-3/Macros.scala | 87 ++ os/src-3/acyclic.scala | 6 + os/src-jvm/ResourceApi.scala | 7 + os/src-native/ResourceApi.scala | 2 + os/src/FileOps.scala | 96 +- os/src/Internals.scala | 13 +- os/src/Model.scala | 31 +- os/src/Path.scala | 151 ++- os/src/PermsOps.scala | 7 +- os/src/ProcessOps.scala | 660 ++++++++++++++ os/src/ReadWriteOps.scala | 34 +- os/src/Source.scala | 7 + os/src/StatOps.scala | 1 + os/src/SubProcess.scala | 588 ++++++++++++ os/src/TempOps.scala | 9 +- os/src/ZipOps.scala | 320 +++++++ os/src/experimental.scala | 8 + os/src/package.scala | 79 ++ os/test/resources/restricted/File.txt | 1 + os/test/resources/restricted/Multi Line.txt | 4 + os/test/resources/restricted/folder1/one.txt | 1 + .../restricted/folder2/nestedA/a.txt | 1 + .../restricted/folder2/nestedB/b.txt | 1 + .../resources/restricted/misc/broken-symlink | 0 .../resources/restricted/misc/file-symlink | 1 + .../resources/restricted/misc/folder-symlink | 1 + os/test/src-jvm/ExampleTests.scala | 83 +- os/test/src-jvm/OpTestsJvmOnly.scala | 268 +----- .../src-jvm/PathTestsCustomFilesystem.scala | 102 ++- os/test/src-jvm/ProcessPipelineTests.scala | 2 +- .../SpawningSubprocessesNewTests.scala | 262 ++++++ .../src-jvm/SpawningSubprocessesTests.scala | 258 +++--- os/test/src-jvm/ZipOpJvmTests.scala | 195 ++++ os/test/src/CheckerTests.scala | 484 ++++++++++ os/test/src/FilesystemMetadataTests.scala | 76 ++ os/test/src/FilesystemPermissionsTests.scala | 54 ++ os/test/src/ListingWalkingTests.scala | 106 +++ .../src/ManipulatingFilesFoldersTests.scala | 277 ++++++ os/test/src/OpTests.scala | 231 ++++- os/test/src/PathTests.scala | 164 +++- os/test/src/PathTestsJvmOnly.scala | 50 + os/test/src/ReadingWritingTests.scala | 137 +++ os/test/src/SegmentsFromStringTests.scala | 39 + os/test/src/SourceTests.scala | 21 + os/test/src/SubprocessTests.scala | 252 +++++ os/test/src/TestUtil.scala | 143 +++ os/test/src/ZipOpTests.scala | 237 +++++ os/test/testJarExit/src/TestJarExit.java | 11 + os/test/testJarReader/src/TestJarReader.java | 24 + os/test/testJarWriter/src/TestJarWriter.java | 24 + .../src/TestSpawnExitHook.scala | 15 + .../src/TestSpawnExitHook2.java | 12 + os/watch/test/src/WatchTests.scala | 36 +- 62 files changed, 6166 insertions(+), 805 deletions(-) create mode 100644 .github/workflows/publish-artifacts.yml create mode 100644 .github/workflows/run-tests.yml create mode 100644 build.mill create mode 100644 os/src-2/Macros.scala create mode 100644 os/src-3/Macros.scala create mode 100644 os/src-3/acyclic.scala create mode 100644 os/src-jvm/ResourceApi.scala create mode 100644 os/src-native/ResourceApi.scala create mode 100644 os/src/ProcessOps.scala create mode 100644 os/src/SubProcess.scala create mode 100644 os/src/ZipOps.scala create mode 100644 os/src/experimental.scala create mode 100644 os/src/package.scala create mode 100644 os/test/resources/restricted/File.txt create mode 100644 os/test/resources/restricted/Multi Line.txt create mode 100644 os/test/resources/restricted/folder1/one.txt create mode 100644 os/test/resources/restricted/folder2/nestedA/a.txt create mode 100644 
os/test/resources/restricted/folder2/nestedB/b.txt create mode 100644 os/test/resources/restricted/misc/broken-symlink create mode 100644 os/test/resources/restricted/misc/file-symlink create mode 120000 os/test/resources/restricted/misc/folder-symlink create mode 100644 os/test/src-jvm/SpawningSubprocessesNewTests.scala create mode 100644 os/test/src-jvm/ZipOpJvmTests.scala create mode 100644 os/test/src/CheckerTests.scala create mode 100644 os/test/src/FilesystemMetadataTests.scala create mode 100644 os/test/src/FilesystemPermissionsTests.scala create mode 100644 os/test/src/ListingWalkingTests.scala create mode 100644 os/test/src/ManipulatingFilesFoldersTests.scala create mode 100644 os/test/src/PathTestsJvmOnly.scala create mode 100644 os/test/src/ReadingWritingTests.scala create mode 100644 os/test/src/SegmentsFromStringTests.scala create mode 100644 os/test/src/SourceTests.scala create mode 100644 os/test/src/SubprocessTests.scala create mode 100644 os/test/src/TestUtil.scala create mode 100644 os/test/src/ZipOpTests.scala create mode 100644 os/test/testJarExit/src/TestJarExit.java create mode 100644 os/test/testJarReader/src/TestJarReader.java create mode 100644 os/test/testJarWriter/src/TestJarWriter.java create mode 100644 os/test/testSpawnExitHook/src/TestSpawnExitHook.scala create mode 100644 os/test/testSpawnExitHook2/src/TestSpawnExitHook2.java diff --git a/.github/workflows/publish-artifacts.yml b/.github/workflows/publish-artifacts.yml new file mode 100644 index 00000000..6d875faa --- /dev/null +++ b/.github/workflows/publish-artifacts.yml @@ -0,0 +1,39 @@ +name: Publish Artifacts + +on: + push: + tags: + - '**' + workflow_dispatch: + +jobs: + publish-sonatype: + if: github.repository == 'com-lihaoyi/os-lib' + runs-on: ubuntu-latest + env: + MILL_SONATYPE_USERNAME: ${{ secrets.SONATYPE_USERNAME }} + MILL_SONATYPE_PASSWORD: ${{ secrets.SONATYPE_PASSWORD }} + MILL_PGP_SECRET_BASE64: ${{ secrets.SONATYPE_PGP_PRIVATE_KEY }} + MILL_PGP_PASSPHRASE: ${{ secrets.SONATYPE_PGP_PRIVATE_KEY_PASSWORD }} + LANG: "en_US.UTF-8" + LC_MESSAGES: "en_US.UTF-8" + LC_ALL: "en_US.UTF-8" + + steps: + - uses: actions/checkout@v3 + - uses: actions/setup-java@v3 + with: + distribution: 'temurin' + java-version: 11 + - name: Publish to Maven Central + run: ./mill -i mill.scalalib.PublishModule/ + + - name: Create GitHub Release + id: create_gh_release + uses: actions/create-release@v1.1.4 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} # This token is provided by Actions, you do not need to create your own token + with: + tag_name: ${{ github.ref }} + release_name: ${{ github.ref }} + draft: false diff --git a/.github/workflows/run-tests.yml b/.github/workflows/run-tests.yml new file mode 100644 index 00000000..879d0bcf --- /dev/null +++ b/.github/workflows/run-tests.yml @@ -0,0 +1,65 @@ +name: Run Tests + +on: + push: + branches: + - main + pull_request: + branches: + - main + +jobs: + test: + strategy: + fail-fast: false + matrix: + os: [ubuntu-latest, windows-latest, macos-latest] + java-version: [11, 17] + + runs-on: ${{ matrix.os }} + + steps: + - uses: actions/checkout@v4 + with: + fetch-depth: 0 + + - uses: actions/setup-java@v4 + with: + distribution: 'temurin' + java-version: ${{ matrix.java-version }} + - name: Fetch millw launcher (Windows) + run: curl -Lo mill.bat "https://raw.githubusercontent.com/lefou/millw/main/millw.bat" + if: matrix.os == 'windows-latest' + + - run: ./mill -i -k __.test + if: matrix.os != 'windows-latest' + - run: ./mill.bat -i -k __.jvm.__.test + if: matrix.os == 
'windows-latest' + + check-bin-compat: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + with: + fetch-depth: 0 + + - uses: actions/setup-java@v4 + with: + distribution: 'temurin' + java-version: 11 + + - run: ./mill -i -k __.mimaReportBinaryIssues + + check-formatting: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + with: + fetch-depth: 0 + + - uses: actions/setup-java@v4 + with: + distribution: 'temurin' + java-version: 17 + + - run: ./mill -i mill.scalalib.scalafmt.ScalafmtModule/checkFormatAll __.sources diff --git a/.mill-version b/.mill-version index ecd2d5d9..ac454c6a 100644 --- a/.mill-version +++ b/.mill-version @@ -1,2 +1 @@ -0.11.6 - +0.12.0 diff --git a/.scalafmt.conf b/.scalafmt.conf index fdf0862d..aa39efda 100644 --- a/.scalafmt.conf +++ b/.scalafmt.conf @@ -1,4 +1,4 @@ -version = "3.7.17" +version = "3.8.3" align.preset = none align.openParenCallSite = false @@ -19,3 +19,6 @@ newlines.source = keep runner.dialect = scala213 +project.excludePaths = [ + "glob:**/src-3/**" +] \ No newline at end of file diff --git a/Readme.adoc b/Readme.adoc index 14420486..a9767b57 100644 --- a/Readme.adoc +++ b/Readme.adoc @@ -1,5 +1,8 @@ = OS-Lib -:version: 0.9.3 +:version: 0.11.4 +:toc-placement: preamble +:toclevels: 3 +:toc: :link-geny: https://github.com/com-lihaoyi/geny :link-oslib: https://github.com/com-lihaoyi/os-lib :link-oslib-gitter: https://gitter.im/lihaoyi/os-lib @@ -8,7 +11,7 @@ :idprefix: :idseparator: - -image:{link-oslib}/actions/workflows/build.yml/badge.svg[Build Status,link={link-oslib}/actions] +image:{link-oslib}/actions/workflows/run-tests.yml/badge.svg[Build Status,link={link-oslib}/actions] image:https://badges.gitter.im/Join%20Chat.svg[Gitter Chat,link={link-oslib-gitter}] image:https://img.shields.io/badge/patreon-sponsor-ff69b4.svg[Patreon,link=https://www.patreon.com/lihaoyi] image:https://javadoc.io/badge2/com.lihaoyi/os-lib_3/scaladoc.svg[API Docs (Scala 3),link=https://javadoc.io/doc/com.lihaoyi/os-lib_3] @@ -16,7 +19,7 @@ image:https://javadoc.io/badge2/com.lihaoyi/os-lib_3/scaladoc.svg[API Docs (Scal [source,scala] ---- // Make sure working directory exists and is empty -val wd = os.pwd/"out"/"splash" +val wd = os.pwd/"out/splash" os.remove.all(wd) os.makeDir.all(wd) @@ -176,9 +179,9 @@ val largestThree = os.walk(wd) .take(3) largestThree ==> Seq( - (711, wd / "misc" / "binary.png"), + (711, wd / "misc/binary.png"), (81, wd / "Multi Line.txt"), - (22, wd / "folder1" / "one.txt") + (22, wd / "folder1/one.txt") ) ---- @@ -187,7 +190,7 @@ largestThree ==> Seq( [source,scala] ---- // Move all files inside the "misc" folder out of it -import os.{GlobSyntax, /} +import os./ os.list(wd / "misc").map(os.move.matching { case p/"misc"/x => p/x } ) ---- @@ -226,7 +229,7 @@ read from if the source supports seeking. [source,scala] ---- os.read(wd / "File.txt") ==> "I am cow" -os.read(wd / "folder1" / "one.txt") ==> "Contents of folder one" +os.read(wd / "folder1/one.txt") ==> "Contents of folder one" os.read(wd / "Multi Line.txt") ==> """I am cow |Hear me moo @@ -249,7 +252,7 @@ supports seeking. 
[source,scala] ---- os.read.bytes(wd / "File.txt") ==> "I am cow".getBytes -os.read.bytes(wd / "misc" / "binary.png").length ==> 711 +os.read.bytes(wd / "misc/binary.png").length ==> 711 ---- ==== `os.read.chunks` @@ -433,9 +436,9 @@ os.write.append(wd / "File.txt", ",\nI weigh twice as much as you") os.read(wd / "File.txt") ==> "I am cow, hear me moo,\nI weigh twice as much as you" -os.read.bytes(wd / "misc" / "binary.png").length ==> 711 -os.write.append(wd / "misc" / "binary.png", Array[Byte](1, 2, 3)) -os.read.bytes(wd / "misc" / "binary.png").length ==> 714 +os.read.bytes(wd / "misc/binary.png").length ==> 711 +os.write.append(wd / "misc/binary.png", Array[Byte](1, 2, 3)) +os.read.bytes(wd / "misc/binary.png").length ==> 714 ---- ==== `os.write.over` @@ -532,10 +535,10 @@ them. You can disable sorted by passing in the flag `sort = false`. [source,scala] ---- -os.list(wd / "folder1") ==> Seq(wd / "folder1" / "one.txt") +os.list(wd / "folder1") ==> Seq(wd / "folder1/one.txt") os.list(wd / "folder2") ==> Seq( - wd / "folder2" / "nestedA", - wd / "folder2" / "nestedB" + wd / "folder2/nestedA", + wd / "folder2/nestedB" ) ---- @@ -595,35 +598,35 @@ if `preOrder = false`. [source,scala] ---- -os.walk(wd / "folder1") ==> Seq(wd / "folder1" / "one.txt") +os.walk(wd / "folder1") ==> Seq(wd / "folder1/one.txt") os.walk(wd / "folder1", includeTarget = true) ==> Seq( wd / "folder1", - wd / "folder1" / "one.txt" + wd / "folder1/one.txt" ) os.walk(wd / "folder2") ==> Seq( - wd / "folder2" / "nestedA", - wd / "folder2" / "nestedA" / "a.txt", - wd / "folder2" / "nestedB", - wd / "folder2" / "nestedB" / "b.txt" + wd / "folder2/nestedA", + wd / "folder2/nestedA/a.txt", + wd / "folder2/nestedB", + wd / "folder2/nestedB/b.txt" ) os.walk(wd / "folder2", preOrder = false) ==> Seq( - wd / "folder2" / "nestedA" / "a.txt", - wd / "folder2" / "nestedA", - wd / "folder2" / "nestedB" / "b.txt", - wd / "folder2" / "nestedB" + wd / "folder2/nestedA/a.txt", + wd / "folder2/nestedA", + wd / "folder2/nestedB/b.txt", + wd / "folder2/nestedB" ) os.walk(wd / "folder2", maxDepth = 1) ==> Seq( - wd / "folder2" / "nestedA", - wd / "folder2" / "nestedB" + wd / "folder2/nestedA", + wd / "folder2/nestedB" ) os.walk(wd / "folder2", skip = _.last == "nestedA") ==> Seq( - wd / "folder2" / "nestedB", - wd / "folder2" / "nestedB" / "b.txt" + wd / "folder2/nestedB", + wd / "folder2/nestedB/b.txt" ) ---- @@ -652,11 +655,11 @@ val filesSortedBySize = os.walk.attrs(wd / "misc", followLinks = true) .collect{case (p, attrs) if attrsisFile => p} filesSortedBySize ==> Seq( - wd / "misc" / "echo", - wd / "misc" / "file-symlink", - wd / "misc" / "echo_with_wd", - wd / "misc" / "folder-symlink" / "one.txt", - wd / "misc" / "binary.png" + wd / "misc/echo", + wd / "misc/file-symlink", + wd / "misc/echo_with_wd", + wd / "misc/folder-symlink/one.txt", + wd / "misc/binary.png" ) ---- @@ -729,10 +732,10 @@ os.exists(wd / "File.txt") ==> true os.exists(wd / "folder1") ==> true os.exists(wd / "doesnt-exist") ==> false -os.exists(wd / "misc" / "file-symlink") ==> true -os.exists(wd / "misc" / "folder-symlink") ==> true -os.exists(wd / "misc" / "broken-symlink") ==> false -os.exists(wd / "misc" / "broken-symlink", followLinks = false) ==> true +os.exists(wd / "misc/file-symlink") ==> true +os.exists(wd / "misc/folder-symlink") ==> true +os.exists(wd / "misc/broken-symlink") ==> false +os.exists(wd / "misc/broken-symlink", followLinks = false) ==> true ---- ==== `os.move` @@ -748,13 +751,13 @@ path already exists, or is within the source path. 
[source,scala] ---- -os.list(wd / "folder1") ==> Seq(wd / "folder1" / "one.txt") -os.move(wd / "folder1" / "one.txt", wd / "folder1" / "first.txt") -os.list(wd / "folder1") ==> Seq(wd / "folder1" / "first.txt") +os.list(wd / "folder1") ==> Seq(wd / "folder1/one.txt") +os.move(wd / "folder1/one.txt", wd / "folder1/first.txt") +os.list(wd / "folder1") ==> Seq(wd / "folder1/first.txt") -os.list(wd / "folder2") ==> Seq(wd / "folder2" / "nestedA", wd / "folder2" / "nestedB") -os.move(wd / "folder2" / "nestedA", wd / "folder2" / "nestedC") -os.list(wd / "folder2") ==> Seq(wd / "folder2" / "nestedB", wd / "folder2" / "nestedC") +os.list(wd / "folder2") ==> Seq(wd / "folder2/nestedA", wd / "folder2/nestedB") +os.move(wd / "folder2/nestedA", wd / "folder2/nestedC") +os.list(wd / "folder2") ==> Seq(wd / "folder2/nestedB", wd / "folder2/nestedC") os.read(wd / "File.txt") ==> "I am cow" os.move(wd / "Multi Line.txt", wd / "File.txt", replaceExisting = true) @@ -780,19 +783,19 @@ e.g. to rename all `.txt` files within a folder tree to `.data`: ---- import os.{GlobSyntax, /} os.walk(wd / "folder2") ==> Seq( - wd / "folder2" / "nestedA", - wd / "folder2" / "nestedA" / "a.txt", - wd / "folder2" / "nestedB", - wd / "folder2" / "nestedB" / "b.txt" + wd / "folder2/nestedA", + wd / "folder2/nestedA/a.txt", + wd / "folder2/nestedB", + wd / "folder2/nestedB/b.txt" ) os.walk(wd/'folder2).collect(os.move.matching{case p/g"$x.txt" => p/g"$x.data"}) os.walk(wd / "folder2") ==> Seq( - wd / "folder2" / "nestedA", - wd / "folder2" / "nestedA" / "a.data", - wd / "folder2" / "nestedB", - wd / "folder2" / "nestedB" / "b.data" + wd / "folder2/nestedA", + wd / "folder2/nestedA/a.data", + wd / "folder2/nestedB", + wd / "folder2/nestedB/b.data" ) ---- @@ -807,9 +810,9 @@ Move the given file or folder _into_ the destination folder [source,scala] ---- -os.list(wd / "folder1") ==> Seq(wd / "folder1" / "one.txt") +os.list(wd / "folder1") ==> Seq(wd / "folder1/one.txt") os.move.into(wd / "File.txt", wd / "folder1") -os.list(wd / "folder1") ==> Seq(wd / "folder1" / "File.txt", wd / "folder1" / "one.txt") +os.list(wd / "folder1") ==> Seq(wd / "folder1/File.txt", wd / "folder1/one.txt") ---- ==== `os.move.over` @@ -824,9 +827,9 @@ folder than may already be present at that path [source,scala] ---- -os.list(wd / "folder2") ==> Seq(wd / "folder2" / "nestedA", wd / "folder2" / "nestedB") +os.list(wd / "folder2") ==> Seq(wd / "folder2/nestedA", wd / "folder2/nestedB") os.move.over(wd / "folder1", wd / "folder2") -os.list(wd / "folder2") ==> Seq(wd / "folder2" / "one.txt") +os.list(wd / "folder2") ==> Seq(wd / "folder2/one.txt") ---- ==== `os.copy` @@ -843,16 +846,16 @@ within the source path. 
[source,scala] ---- -os.list(wd / "folder1") ==> Seq(wd / "folder1" / "one.txt") -os.copy(wd / "folder1" / "one.txt", wd / "folder1" / "first.txt") -os.list(wd / "folder1") ==> Seq(wd / "folder1" / "first.txt", wd / "folder1" / "one.txt") +os.list(wd / "folder1") ==> Seq(wd / "folder1/one.txt") +os.copy(wd / "folder1/one.txt", wd / "folder1/first.txt") +os.list(wd / "folder1") ==> Seq(wd / "folder1/first.txt", wd / "folder1/one.txt") -os.list(wd / "folder2") ==> Seq(wd / "folder2" / "nestedA", wd / "folder2" / "nestedB") -os.copy(wd / "folder2" / "nestedA", wd / "folder2" / "nestedC") +os.list(wd / "folder2") ==> Seq(wd / "folder2/nestedA", wd / "folder2/nestedB") +os.copy(wd / "folder2/nestedA", wd / "folder2/nestedC") os.list(wd / "folder2") ==> Seq( - wd / "folder2" / "nestedA", - wd / "folder2" / "nestedB", - wd / "folder2" / "nestedC" + wd / "folder2/nestedA", + wd / "folder2/nestedB", + wd / "folder2/nestedC" ) os.read(wd / "File.txt") ==> "I am cow" @@ -889,9 +892,9 @@ Copy the given file or folder _into_ the destination folder [source,scala] ---- -os.list(wd / "folder1") ==> Seq(wd / "folder1" / "one.txt") +os.list(wd / "folder1") ==> Seq(wd / "folder1/one.txt") os.copy.into(wd / "File.txt", wd / "folder1") -os.list(wd / "folder1") ==> Seq(wd / "folder1" / "File.txt", wd / "folder1" / "one.txt") +os.list(wd / "folder1") ==> Seq(wd / "folder1/File.txt", wd / "folder1/one.txt") ---- ==== `os.copy.over` @@ -906,9 +909,9 @@ overwrite it instead of erroring out. [source,scala] ---- -os.list(wd / "folder2") ==> Seq(wd / "folder2" / "nestedA", wd / "folder2" / "nestedB") +os.list(wd / "folder2") ==> Seq(wd / "folder2/nestedA", wd / "folder2/nestedB") os.copy.over(wd / "folder1", wd / "folder2") -os.list(wd / "folder2") ==> Seq(wd / "folder2" / "one.txt") +os.list(wd / "folder2") ==> Seq(wd / "folder2/one.txt") ---- ==== `os.copy` with `mergeFolders` @@ -920,10 +923,10 @@ you can use the `mergeFolders` option of <>. [source,scala] ---- -os.list(wd / "folder1") ==> Seq(wd / "folder1" / "one.txt") -os.list(wd / "folder2") ==> Seq(wd / "folder2" / "nestedA", wd / "folder2" / "nestedB") +os.list(wd / "folder1") ==> Seq(wd / "folder1/one.txt") +os.list(wd / "folder2") ==> Seq(wd / "folder2/nestedA", wd / "folder2/nestedB") os.copy(wd / "folder1", wd / "folder2", mergeFolders = true) -os.list(wd / "folder2") ==> Seq(wd / "folder2" / "one.txt", wd / "folder2" / "nestedA", wd / "folder2" / "nestedB") +os.list(wd / "folder2") ==> Seq(wd / "folder2/one.txt", wd / "folder2/nestedA", wd / "folder2/nestedB") ---- ==== `os.makeDir` @@ -969,8 +972,8 @@ to error out in that case by passing in `acceptLinkedDirectory = false` [source,scala] ---- os.exists(wd / "new_folder") ==> false -os.makeDir.all(wd / "new_folder" / "inner" / "deep") -os.exists(wd / "new_folder" / "inner" / "deep") ==> true +os.makeDir.all(wd / "new_folder/inner/deep") +os.exists(wd / "new_folder/inner/deep") ==> true ---- ==== `os.remove` @@ -982,9 +985,9 @@ os.remove(target: Path, checkExists: Boolean = false): Boolean ---- Remove the target file or folder. Folders need to be empty to be removed; if you -want to remove a folder tree recursively, use <>. +want to remove a folder tree recursively, use <>. Returns `true` if the file was present before. -It will fail with an exception when the file is missing but `checkExists` is `true`, +It will fail with an exception when the file is missing but `checkExists` is `true`, or when the directory to remove is not empty. 
[source,scala] @@ -993,10 +996,10 @@ os.exists(wd / "File.txt") ==> true os.remove(wd / "File.txt") os.exists(wd / "File.txt") ==> false -os.exists(wd / "folder1" / "one.txt") ==> true -os.remove(wd / "folder1" / "one.txt") +os.exists(wd / "folder1/one.txt") ==> true +os.remove(wd / "folder1/one.txt") os.remove(wd / "folder1") -os.exists(wd / "folder1" / "one.txt") ==> false +os.exists(wd / "folder1/one.txt") ==> false os.exists(wd / "folder1") ==> false ---- @@ -1005,17 +1008,17 @@ destination: [source,scala] ---- -os.remove(wd / "misc" / "file-symlink") -os.exists(wd / "misc" / "file-symlink", followLinks = false) ==> false +os.remove(wd / "misc/file-symlink") +os.exists(wd / "misc/file-symlink", followLinks = false) ==> false os.exists(wd / "File.txt", followLinks = false) ==> true -os.remove(wd / "misc" / "folder-symlink") -os.exists(wd / "misc" / "folder-symlink", followLinks = false) ==> false +os.remove(wd / "misc/folder-symlink") +os.exists(wd / "misc/folder-symlink", followLinks = false) ==> false os.exists(wd / "folder1", followLinks = false) ==> true -os.exists(wd / "folder1" / "one.txt", followLinks = false) ==> true +os.exists(wd / "folder1/one.txt", followLinks = false) ==> true -os.remove(wd / "misc" / "broken-symlink") -os.exists(wd / "misc" / "broken-symlink", followLinks = false) ==> false +os.remove(wd / "misc/broken-symlink") +os.exists(wd / "misc/broken-symlink", followLinks = false) ==> false ---- If you wish to remove the destination of a symlink, use @@ -1025,7 +1028,7 @@ If you wish to remove the destination of a symlink, use [source,scala] ---- -os.remove.all(target: Path): Unit +os.remove.all(target: Path, ignoreErrors: Boolean = false): Unit ---- Remove the target file or folder; if it is a folder and not empty, recursively @@ -1033,9 +1036,9 @@ removing all it's contents before deleting it. [source,scala] ---- -os.exists(wd / "folder1" / "one.txt") ==> true +os.exists(wd / "folder1/one.txt") ==> true os.remove.all(wd / "folder1") -os.exists(wd / "folder1" / "one.txt") ==> false +os.exists(wd / "folder1/one.txt") ==> false os.exists(wd / "folder1") ==> false ---- @@ -1044,22 +1047,28 @@ destination: [source,scala] ---- -os.remove.all(wd / "misc" / "file-symlink") -os.exists(wd / "misc" / "file-symlink", followLinks = false) ==> false +os.remove.all(wd / "misc/file-symlink") +os.exists(wd / "misc/file-symlink", followLinks = false) ==> false os.exists(wd / "File.txt", followLinks = false) ==> true -os.remove.all(wd / "misc" / "folder-symlink") -os.exists(wd / "misc" / "folder-symlink", followLinks = false) ==> false +os.remove.all(wd / "misc/folder-symlink") +os.exists(wd / "misc/folder-symlink", followLinks = false) ==> false os.exists(wd / "folder1", followLinks = false) ==> true -os.exists(wd / "folder1" / "one.txt", followLinks = false) ==> true +os.exists(wd / "folder1/one.txt", followLinks = false) ==> true -os.remove.all(wd / "misc" / "broken-symlink") -os.exists(wd / "misc" / "broken-symlink", followLinks = false) ==> false +os.remove.all(wd / "misc/broken-symlink") +os.exists(wd / "misc/broken-symlink", followLinks = false) ==> false ---- If you wish to remove the destination of a symlink, use <>. +``os.remove.all`` removes nested files and folders one at a time, and any failure +in removing a file (e.g. due to permissions) or folder (e.g. due to someone concurrently +creating a file within it) causes an error to be thrown and terminates the removal early. 
+You can pass `ignoreErrors = false` to continue with the deletion of other files +even if some files or folders failed to be removed. + ==== `os.hardlink` [source,scala] @@ -1121,22 +1130,22 @@ Returns the immediate destination of the given symbolic link. [source,scala] ---- -os.readLink(wd / "misc" / "file-symlink") ==> os.up / "File.txt" -os.readLink(wd / "misc" / "folder-symlink") ==> os.up / "folder1" -os.readLink(wd / "misc" / "broken-symlink") ==> os.rel / "broken" -os.readLink(wd / "misc" / "broken-abs-symlink") ==> os.root / "doesnt" / "exist" +os.readLink(wd / "misc/file-symlink") ==> os.up / "File.txt" +os.readLink(wd / "misc/folder-symlink") ==> os.up / "folder1" +os.readLink(wd / "misc/broken-symlink") ==> os.rel / "broken" +os.readLink(wd / "misc/broken-abs-symlink") ==> os.root / "doesnt/exist" ---- Note that symbolic links can be either absolute ``os.Path``s or relative -``os.RelPath``s, represented by `os.FilePath`. You can also use `os.readLink.all` +``os.RelPath``s, represented by `os.FilePath`. You can also use `os.readLink.absolute` to automatically resolve relative symbolic links to their absolute destination: [source,scala] ---- -os.readLink.absolute(wd / "misc" / "file-symlink") ==> wd / "File.txt" -os.readLink.absolute(wd / "misc" / "folder-symlink") ==> wd / "folder1" -os.readLink.absolute(wd / "misc" / "broken-symlink") ==> wd / "misc" / "broken" -os.readLink.absolute(wd / "misc" / "broken-abs-symlink") ==> os.root / "doesnt" / "exist" +os.readLink.absolute(wd / "misc/file-symlink") ==> wd / "File.txt" +os.readLink.absolute(wd / "misc/folder-symlink") ==> wd / "folder1" +os.readLink.absolute(wd / "misc/broken-symlink") ==> wd / "misc/broken" +os.readLink.absolute(wd / "misc/broken-abs-symlink") ==> os.root / "doesnt/exist" ---- ==== `os.followLink` @@ -1152,9 +1161,9 @@ symbolic link in the given path is broken) [source,scala] ---- -os.followLink(wd / "misc" / "file-symlink") ==> Some(wd / "File.txt") -os.followLink(wd / "misc" / "folder-symlink") ==> Some(wd / "folder1") -os.followLink(wd / "misc" / "broken-symlink") ==> None +os.followLink(wd / "misc/file-symlink") ==> Some(wd / "File.txt") +os.followLink(wd / "misc/folder-symlink") ==> Some(wd / "folder1") +os.followLink(wd / "misc/broken-symlink") ==> None ---- ==== `os.temp` @@ -1212,6 +1221,249 @@ os.write(tempDir / "file", "Hello") os.list(tempDir) ==> Seq(tempDir / "file") ---- +=== Zip & Unzip Files + +==== `os.zip` + +[source,scala] +---- +def apply(dest: os.Path, + sources: Seq[ZipSource] = List(), + excludePatterns: Seq[Regex] = List(), + includePatterns: Seq[Regex] = List(), + preserveMtimes: Boolean = false, + deletePatterns: Seq[Regex] = List(), + compressionLevel: Int = -1 /* 0-9 */): os.Path +---- + +The zip object provides functionality to create or modify zip archives. It supports: + +- Zipping Files and Directories: You can zip both individual files and entire directories. +- Appending to Existing Archives: Files can be appended to an existing zip archive. +- Exclude Patterns (-x): You can specify files or patterns to exclude while zipping. +- Include Patterns (-i): You can include specific files or patterns while zipping. +- Delete Patterns (-d): You can delete specific files from an existing zip archive. +- Configuring whether or not to preserve filesyste mtimes and permissions + +This will create a new zip archive at `dest` containing `file1.txt` and everything +inside `sources`. 
If `dest` already exists as a zip, the files will be appended to the
existing zip, and any existing zip entries matching `deletePatterns` will be removed.

Note that `os.zip` doesn't support creating/unpacking symlinks or filesystem permissions
in Zip files, because the underlying `java.util.zip.Zip*Stream` doesn't support them.

===== Zipping Files and Folders

The example below demonstrates the core workflows: creating a zip, appending to it, and
unzipping it:

[source,scala]
----
// Zipping files and folders in a new zip file
val zipFileName = "zip-file-test.zip"
val zipFile1: os.Path = os.zip(
  dest = wd / zipFileName,
  sources = Seq(
    wd / "File.txt",
    wd / "folder1"
  )
)

// Adding files and folders to an existing zip file
os.zip(
  dest = zipFile1,
  sources = Seq(
    wd / "folder2",
    wd / "Multi Line.txt"
  )
)

// Unzip file to a destination folder
val unzippedFolder = os.unzip(
  source = wd / zipFileName,
  dest = wd / "unzipped folder"
)

val paths = os.walk(unzippedFolder)
val expected = Seq(
  // Files get included in the zip root using their name
  wd / "unzipped folder/File.txt",
  wd / "unzipped folder/Multi Line.txt",
  // Folder contents get included relative to the source root
  wd / "unzipped folder/nestedA",
  wd / "unzipped folder/nestedB",
  wd / "unzipped folder/one.txt",
  wd / "unzipped folder/nestedA/a.txt",
  wd / "unzipped folder/nestedB/b.txt"
)
assert(paths.sorted == expected)
----

===== Renaming files in the zip

You can also pass in a mapping to `os.zip` to specify exactly where in the zip each
input source file or folder should go:

```scala
val zipFileName = "zip-file-test.zip"
val zipFile1: os.Path = os.zip(
  dest = wd / zipFileName,
  sources = List(
    // renaming files and folders
    wd / "File.txt" -> os.sub / "renamed-file.txt",
    wd / "folder1" -> os.sub / "renamed-folder"
  )
)

val unzippedFolder = os.unzip(
  source = zipFile1,
  dest = wd / "unzipped folder"
)

val paths = os.walk(unzippedFolder)
val expected = Seq(
  wd / "unzipped folder/renamed-file.txt",
  wd / "unzipped folder/renamed-folder",
  wd / "unzipped folder/renamed-folder/one.txt"
)
assert(paths.sorted == expected)
```

===== Excluding/Including Files in Zip

You can specify files or folders to be excluded or included when creating the zip:

[source,scala]
----
os.zip(
  os.Path("/path/to/destination.zip"),
  List(os.Path("/path/to/folder")),
  excludePatterns = List(".*\\.log".r, "temp/.*".r), // Exclude log files and "temp" folder
  includePatterns = List(".*\\.txt".r) // Include only .txt files
)
----

This will include only `.txt` files, excluding any `.log` files and anything inside
the `temp` folder.

==== `os.zip.stream`

You can use `os.zip.stream` to write the final zip to an `OutputStream` rather than a
concrete `os.Path`. `os.zip.stream` returns a `geny.Writable`, which has a `writeBytesTo`
method:

```scala
val zipFileName = "zipStreamFunction.zip"

val stream = os.write.outputStream(wd / "zipStreamFunction.zip")

val writable = os.zip.stream(sources = Seq(wd / "File.txt"))

writable.writeBytesTo(stream)
stream.close()

val unzippedFolder = os.unzip(
  source = wd / zipFileName,
  dest = wd / "zipStreamFunction"
)

val paths = os.walk(unzippedFolder)
assert(paths == Seq(unzippedFolder / "File.txt"))
```

This can be useful for streaming the zipped data to places which are not files:
over the network, over a pipe, etc.
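For instance, the sketch below (the buffer and the `round-tripped` folder name are
illustrative) builds a zip entirely in memory and immediately unpacks it again via
`os.unzip.stream`, described below:

[source,scala]
----
// Write the zip data into an in-memory buffer instead of a file on disk
val buffer = new java.io.ByteArrayOutputStream()
os.zip.stream(sources = Seq(wd / "File.txt")).writeBytesTo(buffer)

// The bytes could now be sent over the network; here we simply feed them
// back into os.unzip.stream to unpack them again
os.makeDir.all(wd / "round-tripped")
os.unzip.stream(
  source = new java.io.ByteArrayInputStream(buffer.toByteArray),
  dest = wd / "round-tripped"
)

os.walk(wd / "round-tripped") ==> Seq(wd / "round-tripped/File.txt")
----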
+ +==== `os.unzip` + +===== Unzipping Files +[source,scala] + +---- +os.unzip(os.Path("/path/to/archive.zip"), Some(os.Path("/path/to/destination"))) +---- + +This extracts the contents of `archive.zip` to the specified destination. + + +===== Excluding Files While Unzipping +You can exclude certain files from being extracted using patterns: + +[source,scala] +---- +os.unzip( + os.Path("/path/to/archive.zip"), + Some(os.Path("/path/to/destination")), + excludePatterns = List(".*\\.log".r, "temp/.*".r) // Exclude log files and the "temp" folder +) +---- + +===== `os.unzip.list` +You can list the contents of the zip file without extracting them: + +[source,scala] +---- +os.unzip.list(os.Path("/path/to/archive.zip")) +---- + +This will print all the file paths contained in the zip archive. + +==== `os.unzip.stream` + +You can unzip a zip file from any arbitrary `java.io.InputStream` containing its binary data +using the `os.unzip.stream` method: + +```scala +val readableZipStream: java.io.InputStream = ??? + +// Unzipping the stream to the destination folder +os.unzip.stream( + source = readableZipStream, + dest = unzippedFolder +) +``` + +This can be useful if the zip file does not exist on disk, e.g. if it is received over the network +or produced in-memory by application logic. + +OS-Lib also provides the `os.unzip.streamRaw` API, which is a lower level API used internally +within `os.unzip.stream` but can also be used directly if lower-level control is necessary. + +==== `os.zip.open` + +```scala +os.zip.open(path: Path): ZipRoot +``` + +`os.zip.open` allows you to treat zip files as filesystems, using normal `os.*` operations +on them. This provides a move flexible way to manipulate the contents of the zip in a fine-grained +manner when the normal `os.zip` or `os.unzip` operations do not suffice. + +```scala +val zipFile = os.zip.open(wd / "zip-test.zip") +try { + os.copy(wd / "File.txt", zipFile / "File.txt") + os.copy(wd / "folder1", zipFile / "folder1") + os.copy(wd / "folder2", zipFile / "folder2") +}finally zipFile.close() + +val zipFile2 = os.zip.open(wd / "zip-test.zip") +try{ + os.list(zipFile2) ==> Vector(zipFile2 / "File.txt", zipFile2 / "folder1", zipFile2 / "folder2") + os.remove.all(zipFile2 / "folder2") + os.remove(zipFile2 / "File.txt") +}finally zipFile2.close() + +val zipFile3 = os.zip.open(wd / "zip-test.zip") +try os.list(zipFile3) ==> Vector(zipFile3 / "folder1") +finally zipFile3.close() +``` + +`os.zip.open` returns a `ZipRoot`, which is identical to `os.Path` except it references the root +of the zip file rather than a bare path on the filesystem. Note that you need to call `ZipRoot#close()` +when you are done with it to avoid leaking filesystem resources. + === Filesystem Metadata ==== `os.stat` @@ -1261,9 +1513,9 @@ pass in `followLinks = false` to not do so. os.isFile(wd / "File.txt") ==> true os.isFile(wd / "folder1") ==> false -os.isFile(wd / "misc" / "file-symlink") ==> true -os.isFile(wd / "misc" / "folder-symlink") ==> false -os.isFile(wd / "misc" / "file-symlink", followLinks = false) ==> false +os.isFile(wd / "misc/file-symlink") ==> true +os.isFile(wd / "misc/folder-symlink") ==> false +os.isFile(wd / "misc/file-symlink", followLinks = false) ==> false ---- ==== `os.isDir` @@ -1281,9 +1533,9 @@ pass in `followLinks = false` to not do so. 
os.isDir(wd / "File.txt") ==> false os.isDir(wd / "folder1") ==> true -os.isDir(wd / "misc" / "file-symlink") ==> false -os.isDir(wd / "misc" / "folder-symlink") ==> true -os.isDir(wd / "misc" / "folder-symlink", followLinks = false) ==> false +os.isDir(wd / "misc/file-symlink") ==> false +os.isDir(wd / "misc/folder-symlink") ==> true +os.isDir(wd / "misc/folder-symlink", followLinks = false) ==> false ---- ==== `os.isLink` @@ -1298,8 +1550,8 @@ default, pass in `followLinks = false` to not do so. [source,scala] ---- -os.isLink(wd / "misc" / "file-symlink") ==> true -os.isLink(wd / "misc" / "folder-symlink") ==> true +os.isLink(wd / "misc/file-symlink") ==> true +os.isLink(wd / "misc/folder-symlink") ==> true os.isLink(wd / "folder1") ==> false ---- @@ -1335,12 +1587,12 @@ os.mtime(wd / "File.txt") ==> 0 os.mtime.set(wd / "File.txt", 90000) os.mtime(wd / "File.txt") ==> 90000 -os.mtime(wd / "misc" / "file-symlink") ==> 90000 +os.mtime(wd / "misc/file-symlink") ==> 90000 -os.mtime.set(wd / "misc" / "file-symlink", 70000) +os.mtime.set(wd / "misc/file-symlink", 70000) os.mtime(wd / "File.txt") ==> 70000 -os.mtime(wd / "misc" / "file-symlink") ==> 70000 -assert(os.mtime(wd / "misc" / "file-symlink", followLinks = false) != 40000) +os.mtime(wd / "misc/file-symlink") ==> 70000 +assert(os.mtime(wd / "misc/file-symlink", followLinks = false) != 40000) ---- === Filesystem Permissions @@ -1421,8 +1673,9 @@ os.owner.set(wd / "File.txt", originalOwner) === Spawning Subprocesses -Subprocess are spawned using `+os.proc(command: os.Shellable*).foo(...)+` calls, -where the `command: Shellable*` sets up the basic command you wish to run and +Subprocess are spawned using `+os.call(cmd: os.Shellable, ...)+` or +`+os.spawn(cmd: os.Shellable, ...)+` calls, +where the `cmd: Shellable` sets up the basic command you wish to run and `+.foo(...)+` specifies how you want to run it. `os.Shellable` represents a value that can make up part of your subprocess command, and the following values can be used as ``os.Shellable``s: @@ -1433,6 +1686,7 @@ be used as ``os.Shellable``s: * `os.RelPath` * `T: Numeric` * ``Iterable[T]``s of any of the above +* ``TupleN[T1, T2, ...Tn]``s of any of the above Most of the subprocess commands also let you redirect the subprocess's `stdin`/`stdout`/`stderr` streams via `os.ProcessInput` or `os.ProcessOutput` @@ -1446,8 +1700,11 @@ the subprocess via `os.SubProcess#stdin`, and if used on its stdout it lets the parent process read from the subprocess via `os.SubProcess#stdout` and `os.SubProcess#stderr`. * `os.Inherit`: inherits the stream from the parent process. This lets the -subprocess read directly from the paren process's standard input or write -directly to the parent process's standard output or error +subprocess read directly from the parent process's standard input or write +directly to the parent process's standard output or error. `os.Inherit` +can be redirected on a threadlocal basis via `os.Inherit.in`, `.out`, or `.err`. +* `os.InheritRaw`: identical to `os.Inherit`, but without being affected by +redirects. * `os.Path`: connects the subprocess's stream to the given filesystem path, reading its standard input from a file or writing its standard output/error to the file. @@ -1461,12 +1718,12 @@ Often, if you are only interested in capturing the standard output of the subprocess but want any errors sent to the console, you might set `stderr = os.Inherit` while leaving `stdout = os.Pipe`. 
-==== `os.proc.call` +==== `os.call` [source,scala] ---- -os.proc(command: os.Shellable*) - .call(cwd: Path = null, +os.call(cmd: os.Shellable, + cwd: Path = null, env: Map[String, String] = null, stdin: ProcessInput = Pipe, stdout: ProcessOutput = Pipe, @@ -1474,9 +1731,13 @@ os.proc(command: os.Shellable*) mergeErrIntoOut: Boolean = false, timeout: Long = Long.MaxValue, check: Boolean = true, - propagateEnv: Boolean = true): os.CommandResult + propagateEnv: Boolean = true, + shutdownGracePeriod: Long = 100, + destroyOnExit: Boolean = true): os.CommandResult ---- +_Also callable via `os.proc(cmd).call(...)`_ + Invokes the given subprocess like a function, passing in input and returning a `CommandResult`. You can then call `result.exitCode` to see how it exited, or `result.out.bytes` or `result.err.string` to access the aggregated stdout and @@ -1487,6 +1748,8 @@ is run: * `cwd`: the working directory of the subprocess * `env`: any additional environment variables you wish to set in the subprocess + in addition to those passed via `propagateEnv`. You can also set their values + to `null` to remove specific variables. * `stdin`: any data you wish to pass to the subprocess's standard input * `stdout`/`stderr`: these are ``os.Redirect``s that let you configure how the processes output/error streams are configured. @@ -1502,7 +1765,7 @@ Note that redirecting `stdout`/`stderr` elsewhere means that the respective [source,scala] ---- -val res = os.proc('ls, wd/"folder2").call() +val res = os.call(cmd = ('ls, wd/"folder2")) res.exitCode ==> 0 @@ -1525,13 +1788,13 @@ res.out.bytes // Non-zero exit codes throw an exception by default val thrown = intercept[os.SubprocessException]{ - os.proc('ls, "doesnt-exist").call(cwd = wd) + os.call(cmd = ('ls, "doesnt-exist"), cwd = wd) } assert(thrown.result.exitCode != 0) // Though you can avoid throwing by setting `check = false` -val fail = os.proc('ls, "doesnt-exist").call(cwd = wd, check = false) +val fail = os.call(cmd = ('ls, "doesnt-exist"), cwd = wd, check = false) assert(fail.exitCode != 0) @@ -1541,11 +1804,11 @@ fail.out.text() ==> "" assert(fail.err.text().contains("No such file or directory")) // You can pass in data to a subprocess' stdin -val hash = os.proc("shasum", "-a", "256").call(stdin = "Hello World") +val hash = os.call(cmd = ("shasum", "-a", "256"), stdin = "Hello World") hash.out.trim() ==> "a591a6d40bf420404a011733cfb7b190d62c65bf0bcda32b57b277d9ad9f146e -" // Taking input from a file and directing output to another file -os.proc("base64").call(stdin = wd / "File.txt", stdout = wd / "File.txt.b64") +os.call(cmd = ("base64"), stdin = wd / "File.txt", stdout = wd / "File.txt.b64") os.read(wd / "File.txt.b64") ==> "SSBhbSBjb3c=" ---- @@ -1564,7 +1827,8 @@ of `os.proc.call` in a streaming fashion, either on groups of bytes: [source,scala] ---- var lineCount = 1 -os.proc('find, ".").call( +os.call( + cmd = ('find, "."), cwd = wd, stdout = os.ProcessOutput( (buf, len) => lineCount += buf.slice(0, len).count(_ == '\n') @@ -1578,7 +1842,8 @@ Or on lines of output: ---- lineCount ==> 22 var lineCount = 1 -os.proc('find, ".").call( +os.call( + cmd = ('find, "."), cwd = wd, stdout = os.ProcessOutput.Readlines( line => lineCount += 1 @@ -1587,21 +1852,25 @@ os.proc('find, ".").call( lineCount ==> 22 ---- -==== `os.proc.spawn` +==== `os.spawn` [source,scala] ---- -os.proc(command: os.Shellable*) - .spawn(cwd: Path = null, +os.spawn(cmd: os.Shellable, + cwd: Path = null, env: Map[String, String] = null, stdin: os.ProcessInput = os.Pipe, stdout: 
os.ProcessOutput = os.Pipe, stderr: os.ProcessOutput = os.Pipe, mergeErrIntoOut: Boolean = false, - propagateEnv: Boolean = true): os.SubProcess + propagateEnv: Boolean = true, + shutdownGracePeriod: Long = 100, + destroyOnExit: Boolean = true): os.SubProcess ---- -The most flexible of the `os.proc` calls, `os.proc.spawn` simply configures and +_Also callable via `os.proc(cmd).spawn(...)`_ + +The most flexible of the `os.proc` calls, `os.spawn` simply configures and starts a subprocess, and returns it as a `os.SubProcess`. `os.SubProcess` is a simple wrapper around `java.lang.Process`, which provides `stdin`, `stdout`, and `stderr` streams for you to interact with however you like. e.g. You can sending @@ -1613,10 +1882,7 @@ as the stdin of a second spawned process. Note that if you provide `ProcessOutput` callbacks to `stdout`/`stderr`, the calls to those callbacks take place on newly spawned threads that execute in parallel with the main thread. Thus make sure any data processing you do in -those callbacks is thread safe! For simpler cases, it may be easier to use -`os.proc.stream` which triggers it's `onOut`/`onErr` callbacks -all on the calling thread, avoiding needing to think about multithreading and -concurrency issues. +those callbacks is thread safe! `stdin`, `stdout` and `stderr` are ``java.lang.OutputStream``s and ``java.lang.InputStream``s enhanced with the `.writeLine(s: String)`/`.readLine()` @@ -1625,8 +1891,10 @@ methods for easy reading and writing of character and line-based data. [source,scala] ---- // Start a long-lived python process which you can communicate with -val sub = os.proc("python", "-u", "-c", "while True: print(eval(raw_input()))") - .spawn(cwd = wd) +val sub = os.spawn( + cmd = ("python", "-u", "-c", "while True: print(eval(raw_input()))"), + cwd = wd +) // Sending some text to the subprocess sub.stdin.write("1 + 2") @@ -1648,12 +1916,25 @@ sub.stdout.read() ==> '8'.toByte sub.destroy() // You can chain multiple subprocess' stdin/stdout together -val curl = os.proc("curl", "-L" , "https://git.io/fpfTs").spawn(stderr = os.Inherit) -val gzip = os.proc("gzip", "-n").spawn(stdin = curl.stdout) -val sha = os.proc("shasum", "-a", "256").spawn(stdin = gzip.stdout) +val curl = os.spawn(cmd = ("curl", "-L" , "https://git.io/fpfTs"), stderr = os.Inherit) +val gzip = os.spawn(cmd = ("gzip", "-n"), stdin = curl.stdout) +val sha = os.spawn(cmd = ("shasum", "-a", "256"), stdin = gzip.stdout) sha.stdout.trim ==> "acc142175fa520a1cb2be5b97cbbe9bea092e8bba3fe2e95afa645615908229e -" ---- +==== Customizing the default environment + +Client-server CLI applications sometimes want to run subprocesses on the server based on the environment of the client. +It is possible to customize the default environment passed to subprocesses by setting the `os.SubProcess.env` threadlocal: + +[source,scala] +---- +val clientEnvironment: Map[String, String] = ??? +os.SubProcess.env.withValue(clientEnvironment) { + os.call(command) // clientEnvironment is passed by default instead of the system environment +} +---- + == Spawning Pipelines of Subprocesses After constructing a subprocess with `os.proc`, you can use the `pipeTo` method @@ -1682,13 +1963,13 @@ val yes10 = os.proc("yes") ---- This feature is implemented inside the library and will terminate any process reading the -stdin of other process in pipeline on every IO error. This behavior can be disabled via the -`handleBrokenPipe` flag on `call` and `spawn` methods. 
Note that Windows does not support -broken pipe behaviour, so a command like`yes` would run forever. `handleBrokenPipe` is set +stdin of other process in pipeline on every IO error. This behavior can be disabled via the +`handleBrokenPipe` flag on `call` and `spawn` methods. Note that Windows does not support +broken pipe behaviour, so a command like`yes` would run forever. `handleBrokenPipe` is set to false by default on Windows. Both `call` and `spawn` correspond in their behavior to their counterparts in the `os.proc`, -but `spawn` returns the `os.ProcessPipeline` instance instead. It offers the same +but `spawn` returns the `os.ProcessPipeline` instance instead. It offers the same `API` as `SubProcess`, but will operate on the set of processes instead of a single one. `Pipefail` is enabled by default, so if any of the processes in the pipeline fails, the whole @@ -1744,15 +2025,15 @@ Here is an example of use from the Ammonite REPL: @ os.watch.watch(Seq(os.pwd / "out"), paths => println("paths changed: " + paths.mkString(", "))) -@ os.write(os.pwd / "out" / "i am", "cow") +@ os.write(os.pwd / "out/i am", "cow") paths changed: /Users/lihaoyi/Github/Ammonite/out/i am -@ os.move(os.pwd / "out" / "i am", os.pwd / "out" / "hear me") +@ os.move(os.pwd / "out/i am", os.pwd / "out/hear me") paths changed: /Users/lihaoyi/Github/Ammonite/out/i am,/Users/lihaoyi/Github/Ammonite/out/hear me -@ os.remove.all(os.pwd / "out" / "version") +@ os.remove.all(os.pwd / "out/version") paths changed: /Users/lihaoyi/Github/Ammonite/out/version/log,/Users/lihaoyi/Github/Ammonite/out/version/meta.json,/Users/lihaoyi/Github/Ammonite/out/version ---- @@ -1786,14 +2067,20 @@ Absolute paths can be created in a few ways: // from the pwd) is called `wd` val wd = os.pwd -// A path nested inside `wd` +// A path nested inside `wd` in multiple segments wd / "folder" / "file" +// The RHS of `/` can have multiple segments if-and-only-if it is a literal string +wd / "folder/file" + +// Literal syntax for absolute `os.Path` +val p: os.Path = "/folder/file" + // A path starting from the root -os.root / "folder" / "file" +os.root / "folder/file" // A path with spaces or other special characters -wd / "My Folder" / "My File.txt" +wd / "My Folder/My File.txt" // Up one level from the wd wd / os.up @@ -1802,8 +2089,18 @@ wd / os.up wd / os.up / os.up ---- -Note that there are no in-built operations to change the `os.pwd`. In general, -you should not need to: simply defining a new path, e.g. +When constructing ``os.Path``s, the right-hand-side of the `/` operator must be either a non-literal +a string expression containing a single path segment or a literal string containing one-or-more +path segments. If a non-literal string expression on the RHS contains multiple segments, you need +to wrap the RHS in an explicit `os.RelPath(...)` or `os.SubPath(...)` constructor to tell OS-Lib +how to interpret it. The single-segment limitation is intended to avoid the developer accidentally +introducing https://en.wikipedia.org/wiki/Directory_traversal_attack[Directory Traversal Attacks] +or other related bugs when naively constructing paths out of dynamic and potentially untrusted +inputs, which is not an issue for literal string since the string value is directly written in +the source code and immediately visible. + +`os.pwd` can be modified in certain scopes via the `os.dynamicPwd` dynamic variable, but +best practice is not to change it. Instead simply define a new path, e.g. 
[source,scala] ---- @@ -1837,16 +2134,19 @@ before the relative path is applied. They can be created in the following ways: [source,scala] ---- -// The path "folder/file" +// The path "folder/file" in multiple segments val rel1 = os.rel / "folder" / "file" -val rel2 = os.rel / "folder" / "file" +// RHS of `/` can have multiple segments if-and-only-if it is a literal string +val rel2 = os.rel / "folder/file" +// Literal syntax for `os.RelPath` +val rel3: os.RelPath = "folder/file" // The path "file" -val rel3 = os.rel / "file" +val rel4 = os.rel / "file" // The relative difference between two paths -val target = os.pwd / "target" / "file" -assert((target.relativeTo(os.pwd)) == os.rel / "target" / "file") +val target = os.pwd / "target/file" +assert((target.relativeTo(os.pwd)) == os.rel / "target/file") // `up`s get resolved automatically val minus = os.pwd.relativeTo(target) @@ -1859,17 +2159,17 @@ combined with absolute paths in order to create new absolute paths. e.g. [source,scala] ---- -val target = os.pwd / "target" / "file" +val target = os.pwd / "target/file" val difference = target.relativeTo(os.pwd) -val newBase = os.root / "code" / "server" -assert(newBase / difference == os.root / "code" / "server" / "target" / "file") +val newBase = os.root / "code/server" +assert(newBase / difference == os.root / "code/server/target/file") ---- `os.up` is a relative path that comes in-built: [source,scala] ---- -val target = os.root / "target" / "file" +val target = os.root / "target/file" assert(target / os.up == os.root / "target") ---- @@ -1878,10 +2178,10 @@ canonical manner: [source,scala] ---- -assert((os.root / "folder" / "file" / os.up).toString == "/folder") +assert((os.root / "folder/file" / os.up).toString == "/folder") // not "/folder/file/.." -assert((os.rel / "folder" / "file" / os.up).toString == "folder") +assert((os.rel / "folder/file" / os.up).toString == "folder") // not "folder/file/.." ---- @@ -1902,31 +2202,34 @@ They can be created in the following ways: [source,scala] ---- -// The path "folder/file" +// The path "folder/file" in multiple segments val sub1 = os.sub / "folder" / "file" -val sub2 = os.sub / "folder" / "file" +// RHS of `/` can have multiple segments if-and-only-if it is a literal string +val sub2 = os.sub / "folder/file" +// Literal syntax for `os.SubPath` +val sub2: os.Subpath = "folder/file" // The relative difference between two paths -val target = os.pwd / "out" / "scratch" / "file" -assert((target subRelativeTo os.pwd) == os.sub / "out" / "scratch" / "file") +val target = os.pwd / "out/scratch/file" +assert((target subRelativeTo os.pwd) == os.sub / "out/scratch/file") // Converting os.RelPath to os.SubPath -val rel3 = os.rel / "folder" / "file" -val sub3 = rel3.asSubPath +val rel3 = os.rel / "folder/file" +val sub4 = rel3.asSubPath ---- ``os.SubPath``s are useful for representing paths within a particular folder or directory. You can combine them with absolute ``os.Path``s to -resolve paths within them, without needing to worry about https://en.wikipedia.org/wiki/Directory_traversal_attack[Directory -Traversal Attacks] +resolve paths within them, without needing to worry about +https://en.wikipedia.org/wiki/Directory_traversal_attack[Directory Traversal Attacks] du to accidentally accessing paths outside the destination folder. 
[source,scala] ---- -val target = os.pwd / "target" / "file" +val target = os.pwd / "target/file" val difference = target.relativeTo(os.pwd) -val newBase = os.root / "code" / "server" -assert(newBase / difference == os.root / "code" / "server" / "target" / "file") +val newBase = os.root / "code/server" +assert(newBase / difference == os.root / "code/server/target/file") ---- Attempting to construct an `os.SubPath` with `..` segments results in an @@ -1934,7 +2237,7 @@ exception being thrown: [source,scala] ---- -val target = os.pwd / "out" / "scratch" / +val target = os.pwd / "out/scratch" / // `up`s are not allowed in sub paths intercept[Exception](os.pwd subRelativeTo target) @@ -1974,10 +2277,10 @@ val relStr = "hello/cow/world/.." val absStr = "/hello/world" assert( - RelPath(relStr) == "hello" / "cow", + RelPath(relStr) == "hello/cow", // Path(...) also allows paths starting with ~, // which is expanded to become your home directory - Path(absStr) == os.root / "hello" / "world" + Path(absStr) == os.root / "hello/world" ) // You can also pass in java.io.File and java.nio.file.Path @@ -1986,9 +2289,9 @@ val relIoFile = new java.io.File(relStr) val absNioFile = java.nio.file.Paths.get(absStr) assert( - RelPath(relIoFile) == "hello" / "cow", - Path(absNioFile) == os.root / "hello" / "world", - Path(relIoFile, root / "base") == os.root / "base" / "hello" / "cow" + RelPath(relIoFile) == "hello/cow", + Path(absNioFile) == os.root / "hello/world", + Path(relIoFile, root / "base") == os.root / "base/hello/cow" ) ---- @@ -2022,8 +2325,8 @@ parse it : val relStr = "hello/cow/world/.." val absStr = "/hello/world" assert( - FilePath(relStr) == "hello" / "cow", - FilePath(absStr) == os.root / "hello" / "world" + FilePath(relStr) == "hello/cow", + FilePath(absStr) == os.root / "hello/world" ) ---- @@ -2041,9 +2344,9 @@ val relStr = "hello/cow/world/.." val absStr = "/hello/world" val basePath: FilePath = FilePath(relStr) assert( - os.Path(relStr, os.root / "base") == os.root / "base" / "hello" / "cow", - os.Path(absStr, os.root / "base") == os.root / "hello" / "world", - os.Path(basePath, os.root / "base") == os.root / "base" / "hello" / "cow", + os.Path(relStr, os.root / "base") == os.root / "base/hello/cow", + os.Path(absStr, os.root / "base") == os.root / "hello/world", + os.Path(basePath, os.root / "base") == os.root / "base/hello/cow", os.Path(".", os.pwd).last != "" ) ---- @@ -2064,14 +2367,14 @@ explicitly choose to convert relative paths to absolute using some base. ==== Roots and filesystems -If you are using a system that supports different roots of paths, e.g. Windows, -you can use the argument of `os.root` to specify which root you want to use. +If you are using a system that supports different roots of paths, e.g. Windows, +you can use the argument of `os.root` to specify which root you want to use. If not specified, the default root will be used (usually, C on Windows, / on Unix). [source,scala] ---- -val root = os.root('C:\') / "Users" / "me" -assert(root == os.Path("C:\Users\me")) +val root = os.root("C:\\") / "Users/me" +assert(root == os.Path("C:\\Users\\me")) ---- Additionally, custom filesystems can be specified by passing a `FileSystem` to @@ -2087,11 +2390,11 @@ val fs = FileSystems.newFileSystem(uri, env); val path = os.root("/", fs) / "dir" ---- -Note that the jar file system operations suchs as writing to a file are supported -only on JVM 11+. 
Depending on the filesystem, some operations may not be supported - -for example, running an `os.proc` with pwd in a jar file won't work. You may also -meet limitations imposed by the implementations - in jar file system, the files are -created only after the file system is closed. Until that, the ones created in your +Note that the jar file system operations suchs as writing to a file are supported +only on JVM 11+. Depending on the filesystem, some operations may not be supported - +for example, running an `os.proc` with pwd in a jar file won't work. You may also +meet limitations imposed by the implementations - in jar file system, the files are +created only after the file system is closed. Until that, the ones created in your program are kept in memory. ==== `os.ResourcePath` @@ -2103,7 +2406,7 @@ default, the path used to load resources is absolute, using the [source,scala] ---- -val contents = os.read(os.resource / "test" / "ammonite" / "ops" / "folder" / "file.txt") +val contents = os.read(os.resource / "test/ammonite/ops/folder/file.txt") assert(contents.contains("file contents lols")) ---- @@ -2112,7 +2415,7 @@ You can also pass in a classloader explicitly to the resource call: [source,scala] ---- val cl = getClass.getClassLoader -val contents2 = os.read(os.resource(cl)/ "test" / "ammonite" / "ops" / "folder" / "file.txt") +val contents2 = os.read(os.resource(cl)/ "test/ammonite/ops/folder/file.txt") assert(contents2.contains("file contents lols")) ---- @@ -2123,10 +2426,10 @@ current class. [source,scala] ---- val cls = classOf[test.os.Testing] -val contents = os.read(os.resource(cls) / "folder" / "file.txt") +val contents = os.read(os.resource(cls) / "folder/file.txt") assert(contents.contains("file contents lols")) -val contents2 = os.read(os.resource(getClass) / "folder" / "file.txt") +val contents2 = os.read(os.resource(getClass) / "folder/file.txt") assert(contents2.contains("file contents lols")) ---- @@ -2158,9 +2461,9 @@ By default, the following types of values can be used where-ever ``os.Source``s are required: * Any `geny.Writable` data type: - ** `Array[Byte]` - ** `java.lang.String` (these are treated as UTF-8) - ** `java.io.InputStream` +** `Array[Byte]` +** `java.lang.String` (these are treated as UTF-8) +** `java.io.InputStream` * `java.nio.channels.SeekableByteChannel` * Any `TraversableOnce[T]` of the above: e.g. `Seq[String]`, `List[Array[Byte]]`, etc. @@ -2221,6 +2524,95 @@ string, int or set representations of the `os.PermSet` via: == Changelog +=== 0.11.4 + +* Add ability to instrument path based operations using hooks https://github.com/com-lihaoyi/os-lib/pull/325[#325] +* Add compile-time validation of literal paths containing ".." 
https://github.com/com-lihaoyi/os-lib/pull/329[#329]
+* Add literal string syntax for `os.Path`, `os.SubPath`, and `os.RelPath` https://github.com/com-lihaoyi/os-lib/pull/353[#353]
+
+[#0-11-3]
+=== 0.11.3
+
+* `SubProcess` spawning operations now take a `destroyOnExit = true` flag to try and shut them
+ down when the host JVM exits, `SubProcess#destroy` now takes configurable
+ `(shutdownGracePeriod: Long, async: Boolean)` flags to configure the behavior (superseding
+ the old `destroy()`/`destroyForcibly()` methods), and `timeoutGracePeriod` has been renamed to
+ `shutdownGracePeriod` https://github.com/com-lihaoyi/os-lib/pull/324[#324]
+
+[#0-11-2]
+=== 0.11.2
+
+* Use `java.nio.files.Files.newOutputStream` instead of `java.io.FileOutputStream` to
+ try and avoid problems with Windows open file deletion https://github.com/com-lihaoyi/os-lib/pull/323[#323]
+
+[#0-11-1]
+=== 0.11.1
+
+* Propagate content length from filesystem through `geny.Writable` and `os.Source`
+https://github.com/com-lihaoyi/os-lib/pull/320[#320]
+
+[#0-11-0]
+=== 0.11.0
+
+* Added APIs to <> via `os.zip`, `os.unzip`, `os.zip.stream`, `os.unzip.stream`,
+`os.unzip.list`, `os.unzip.streamRaw`, `os.zip.open` https://github.com/com-lihaoyi/os-lib/pull/317[#317]
+
+* Minimum officially supported Java version raised from 8 to 11
+
+[#0-10-7]
+=== 0.10.7
+
+* Allow multi-segment path segments for literals https://github.com/com-lihaoyi/os-lib/pull/297: You
+can now write `os.pwd / "foo/bar/qux"` rather than `os.pwd / "foo" / "bar" / "qux"`. Note that this
+is only allowed for string literals, and non-literal path segments still need to be wrapped e.g.
+`def myString = "foo/bar/qux"; os.pwd / os.SubPath(myString)` for security and safety purposes
+
+[#0-10-6]
+=== 0.10.6
+
+* Make `os.pwd` modifiable via the `os.dynamicPwd` dynamic variable https://github.com/com-lihaoyi/os-lib/pull/298
+
+[#0-10-5]
+=== 0.10.5
+
+* Introduce `os.SubProcess.env` `DynamicVariable` to override the default `env`
+(https://github.com/com-lihaoyi/os-lib/pull/295)
+
+
+[#0-10-4]
+=== 0.10.4
+
+* Add a lightweight syntax for the `os.call()` and `os.spawn` APIs
+(https://github.com/com-lihaoyi/os-lib/pull/292)
+* Add a configurable grace period when subprocesses time out and have to
+be terminated to give a chance for shutdown logic to run
+(https://github.com/com-lihaoyi/os-lib/pull/286)
+
+[#0-10-3]
+=== 0.10.3
+
+* `os.Inherit` can now be redirected on a threadlocal basis via `os.Inherit.in`, `.out`, or `.err`.
+`os.InheritRaw` is available if you do not want the redirects to take effect + + +[#0-10-2] +=== 0.10.2 + +* Support `os.proc` on Scala Native (https://github.com/com-lihaoyi/os-lib/pull/257) + +[#0-10-1] +=== 0.10.1 + +* Fix `os.copy` and `os.move` directories to root (#267) + +[#0-10-0] +=== 0.10.0 + +* Support for Scala-Native 0.5.0 +* Dropped support for Scala 2.11.x +* Minimum version of Scala 3 increased to 3.3.1 + + [#0-9-3] === 0.9.3 - 2024-01-01 @@ -2228,6 +2620,7 @@ string, int or set representations of the `os.PermSet` via: * Fix propagateEnv = false to not propagate env (#238) * Make os.home a def (#239) +[#0-9-2] === 0.9.2 - 2023-11-05 * Added new convenience API to create pipes between processes with `.pipeTo` @@ -2238,11 +2631,13 @@ string, int or set representations of the `os.PermSet` via: * Dependency updates: sourcecode 0.3.1 * Tooling updates: acyclic 0.3.9, Mill 0.11.5, mill-mima 0.0.24, mill-vcs-version 0.4.0, scalafmt 3.7.15 +[#0-9-1] === 0.9.1 - 2023-03-07 * Refined return types when constructing paths with `/` and get rid of long `ThisType#ThisType` cascades. * Added a new `PathConvertible` to support `URI`s when constructing paths. +[#0-9-0] === 0.9.0 - 2022-11-28 * `os.proc` now also supports `CharSequence(s)` as `Shellable` @@ -2252,64 +2647,79 @@ string, int or set representations of the `os.PermSet` via: * Enabled MiMa checks to CI setup and officially support early semantic versioning since this release * Documentation improvements + +=== Older releases +:leveloffset: +1 + +[discrete] === 0.8.1 - 2022-01-31 * Added support for Scala Native on Scala 3 +[discrete] === 0.8.0 - 2021-12-11 * Avoid throwing an exception when sorting identical paths {link-oslib}/pull/90[#90] * Make `os.remove` behave more like `Files.deleteIfExists` {link-oslib}/pull/89[#89] * Make `.ext` on empty paths return `""` rather than crashing {link-oslib}/pull/87[#87] +[discrete] === 0.7.8 - 2021-05-27 * Restored binary compatibility in `os.copy` and `os.copy.into` to os-lib versions before 0.7.5 +[discrete] === 0.7.7 - 2021-05-14 * Add support for Scala 3.0.0 +[discrete] === 0.7.6 - 2021-04-28 * Add support for Scala 3.0.0-RC3 +[discrete] === 0.7.5 - 2021-04-21 * Re-added support for Scala 2.11 * Added new option `mergeFolders` to `os.copy` * os.copy now honors `followLinks` when copying symbolic links to directories +[discrete] === 0.7.4 * Add support for Scala 3.0.0-RC2 +[discrete] === 0.7.3 * Add support for Scala 3.0.0-RC1 * Migration of the CI system from Travis CI to GitHub Actions +[discrete] === 0.7.2 * Add support for Scala 3.0.0-M3 +[discrete] === 0.7.1 * Improve performance of `os.write` by buffering output stream to files -=== Older versions - -==== 0.6.2 +[discrete] +=== 0.6.2 * Moved the `os.Bytes`, `os.StreamValue` (now named `ByteData`) interfaces into `geny` package, for sharing with Requests-Scala * Add `os.read.stream` function, that returns a `geny.Readable` -==== 0.5.0 +[discrete] +=== 0.5.0 * `os.Source` now supports any data type that is `geny.Writable` -==== 0.4.2 +[discrete] +=== 0.4.2 * Added a new <> data type, for safer handling of sub-paths within a directory. 
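To make the path-literal behaviour described in the 0.10.7 and 0.4.2 entries above concrete, here is a minimal illustrative sketch (an editorial aside, not part of the patch hunks; it only restates the documented `os.pwd / "foo/bar/qux"` and `os.SubPath` semantics):

[source,scala]
----
// string literals may contain several segments and are validated at compile time
val a = os.pwd / "foo/bar/qux"
assert(a == os.pwd / "foo" / "bar" / "qux")

// dynamic strings are not expanded implicitly; wrap them explicitly for safety
def myString = "foo/bar/qux"
assert(os.pwd / os.SubPath(myString) == a)
----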
@@ -2331,20 +2741,24 @@ by default {link-oslib}/issues/15[#15], use * `os.stat.full` has been superseded by `os.stat` and `os.stat.posix` * Removed `os.BasicStatInfo`, which has been superseded by `os.StatInfo` -==== 0.3.0 +[discrete] +=== 0.3.0 * Support for Scala 2.13.0 final -==== 0.2.8 +[discrete] +=== 0.2.8 * `os.ProcessOutput` trait is no longer sealed -==== 0.2.7 +[discrete] +=== 0.2.7 * Narrow return type of `readLink.absolute` from `FilePath` to `Path` * Fix handling of standaline `\r` in `os.SubProcess#stdout.readLine` -==== 0.2.6 +[discrete] +=== 0.2.6 * Remove `os.StatInfo#name`, `os.BasicStatInfo#name` and `os.FullStatInfo#name`, since it is just the last path segment of the stat call and doesn't properly @@ -2354,7 +2768,8 @@ to the `skip` predicate. * Add `os.BasePath#baseName`, which returns the section of the path before the `os.BasePath#ext` extension. -==== 0.2.5 +[discrete] +=== 0.2.5 * New `os.readLink`/`os.readLink.absolute` methods to read the contents of symbolic links without dereferencing them. @@ -2378,10 +2793,12 @@ channels * Reversed the order of arguments in `os.symlink` and `os.hardlink`, to match the order of the underlying java NIO functions. -==== 0.2.2 +[discrete] +=== 0.2.2 * Allow chaining of multiple subprocesses `stdin`/`stdout` -==== 0.2.0 +[discrete] +=== 0.2.0 * First release diff --git a/build.mill b/build.mill new file mode 100644 index 00000000..15734069 --- /dev/null +++ b/build.mill @@ -0,0 +1,225 @@ +package build +// plugins +import $ivy.`de.tototec::de.tobiasroeser.mill.vcs.version::0.4.0` +import $ivy.`com.github.lolgab::mill-mima::0.1.1` + +// imports +import mill._, scalalib._, scalanativelib._, publish._ +import mill.scalalib.api.ZincWorkerUtil +import com.github.lolgab.mill.mima._ +import de.tobiasroeser.mill.vcs.version.VcsVersion + +val communityBuildDottyVersion = sys.props.get("dottyVersion").toList + +val scala213Version = "2.13.14" + +val scalaVersions = Seq( + "3.7.0-RC1", + "2.12.17", + scala213Version +) ++ communityBuildDottyVersion + +object Deps { + val acyclic = ivy"com.lihaoyi:::acyclic:0.3.16" + val jna = ivy"net.java.dev.jna:jna:5.15.0" + val geny = ivy"com.lihaoyi::geny::1.1.1" + val sourcecode = ivy"com.lihaoyi::sourcecode::0.4.2" + val utest = ivy"com.lihaoyi::utest::0.8.4" + val expecty = ivy"com.eed3si9n.expecty::expecty::0.16.0" + def scalaReflect(scalaVersion: String) = ivy"org.scala-lang:scala-reflect:$scalaVersion" + def scalaLibrary(version: String) = ivy"org.scala-lang:scala-library:${version}" +} + +trait AcyclicModule extends ScalaModule { + def acyclicDep: T[Agg[Dep]] = T { + Agg.from(Option.when(!ZincWorkerUtil.isScala3(scalaVersion()))(Deps.acyclic)) + } + def acyclicOptions: T[Seq[String]] = T { + Option.when(!ZincWorkerUtil.isScala3(scalaVersion()))("-P:acyclic:force").toSeq + } + def compileIvyDeps = acyclicDep + def scalacPluginIvyDeps = acyclicDep + def scalacOptions = super.scalacOptions() ++ acyclicOptions() ++ + Agg.when(scalaVersion().startsWith("3"))("-experimental") +} + +trait SafeDeps extends ScalaModule { + def mapDependencies: Task[coursier.Dependency => coursier.Dependency] = T.task { + val sd = Deps.scalaLibrary(scala213Version) + super.mapDependencies().andThen { d => + // enforce up-to-date Scala 2.13.x version + if (d.module == sd.dep.module && d.version.startsWith("2.13.")) sd.dep + else d + } + } +} + +trait MiMaChecks extends Mima { + def mimaPreviousVersions = + Seq( + "0.9.0", + "0.9.1", + "0.9.2", + "0.9.3", + "0.10.0", + "0.10.1", + "0.10.2", + "0.10.3", + "0.10.4", + 
"0.10.5", + "0.10.6" + ) + override def mimaBinaryIssueFilters: T[Seq[ProblemFilter]] = Seq( + ProblemFilter.exclude[ReversedMissingMethodProblem]("os.PathConvertible.isCustomFs"), + // this is fine, because ProcessLike is sealed (and its subclasses should be final) + ProblemFilter.exclude[ReversedMissingMethodProblem]("os.ProcessLike.joinPumperThreadsHook") + ) + override def mimaExcludeAnnotations: T[Seq[String]] = Seq( + "os.experimental" + ) +} + +trait OsLibModule + extends CrossScalaModule + with PublishModule + with AcyclicModule + with SafeDeps + with PlatformScalaModule { outer => + + def scalacPluginIvyDeps = super.scalacPluginIvyDeps() ++ Agg.when(scalaVersion().startsWith("2"))(ivy"com.lihaoyi::unroll-plugin:0.1.12") + def ivyDeps = super.ivyDeps() ++ Agg.when(scalaVersion().startsWith("2"))(ivy"com.lihaoyi::unroll-annotation:0.1.12") + def publishVersion = VcsVersion.vcsState().format() + def pomSettings = PomSettings( + description = artifactName(), + organization = "com.lihaoyi", + url = "https://github.com/com-lihaoyi/os-lib", + licenses = Seq(License.MIT), + versionControl = VersionControl.github( + owner = "com-lihaoyi", + repo = "os-lib" + ), + developers = Seq( + Developer("lihaoyi", "Li Haoyi", "https://github.com/lihaoyi") + ) + ) + + trait OsLibTestModule extends ScalaModule with TestModule.Utest with SafeDeps { + def ivyDeps = super.ivyDeps() ++ Agg(Deps.utest, Deps.sourcecode) + // we check the textual output of system commands and expect it in english + def forkEnv = super.forkEnv() ++ Map( + "LC_ALL" -> "C", + "TEST_SUBPROCESS_ENV" -> "value", + "OS_TEST_RESOURCE_FOLDER" -> os.jvm(crossValue).test.resources().head.path.toString + ) + def scalacOptions = super.scalacOptions() ++ + Agg.when(scalaVersion().startsWith("3"))("-experimental") + } +} + +trait OsModule extends OsLibModule { outer => + def ivyDeps = super.ivyDeps() ++ Agg(Deps.geny) + override def compileIvyDeps = T { + val scalaReflectOpt = Option.when(!ZincWorkerUtil.isDottyOrScala3(scalaVersion()))( + Deps.scalaReflect(scalaVersion()) + ) + super.compileIvyDeps() ++ scalaReflectOpt + } + + def artifactName = "os-lib" + + val scalaDocExternalMappings = Seq( + ".*scala.*::scaladoc3::https://scala-lang.org/api/3.x/", + ".*java.*::javadoc::https://docs.oracle.com/javase/8/docs/api/", + s".*geny.*::scaladoc3::https://javadoc.io/doc/com.lihaoyi/geny_3/${Deps.geny.dep.version}/" + ).mkString(",") + + def conditionalScalaDocOptions: T[Seq[String]] = T { + if (ZincWorkerUtil.isDottyOrScala3(scalaVersion())) + Seq( + s"-external-mappings:${scalaDocExternalMappings}" + ) + else Seq() + } + + def scalaDocOptions = super.scalaDocOptions() ++ conditionalScalaDocOptions() + + def generatedSources = T { + val conversions = for (i <- Range.inclusive(2, 22)) yield { + val ts = Range.inclusive(1, i).map(n => s"T$n").mkString(", ") + val fs = Range.inclusive(1, i).map(n => s"f$n: T$n => R").mkString(", ") + val vs = Range.inclusive(1, i).map(n => s"f$n(t._$n)").mkString(", ") + s""" implicit def tuple${i}Conversion[$ts] + | (t: ($ts)) + | (implicit $fs): R = { + | this.flatten($vs) + | } + |""".stripMargin + } + _root_.os.write( + T.dest / "os" / "GeneratedTupleConversions.scala", + s"""package os + |trait GeneratedTupleConversions[R]{ + | protected def flatten(vs: R*): R + | ${conversions.mkString("\n")} + |} + | + |""".stripMargin, + createFolders = true + ) + Seq(PathRef(T.dest)) + } +} + +object os extends Module { + + object jvm extends Cross[OsJvmModule](scalaVersions) + trait OsJvmModule extends OsModule with 
MiMaChecks { + object test extends ScalaTests with OsLibTestModule { + override def ivyDeps = T { super.ivyDeps() ++ Agg(Deps.expecty) } + + // we check the textual output of system commands and expect it in english + def forkEnv = super.forkEnv() ++ Map( + "TEST_JAR_WRITER_ASSEMBLY" -> testJarWriter.assembly().path.toString, + "TEST_JAR_READER_ASSEMBLY" -> testJarReader.assembly().path.toString, + "TEST_JAR_EXIT_ASSEMBLY" -> testJarExit.assembly().path.toString, + "TEST_SPAWN_EXIT_HOOK_ASSEMBLY" -> testSpawnExitHook.assembly().path.toString, + "TEST_SPAWN_EXIT_HOOK_ASSEMBLY2" -> testSpawnExitHook2.assembly().path.toString + ) + + object testJarWriter extends JavaModule + object testJarReader extends JavaModule + object testJarExit extends JavaModule + object testSpawnExitHook extends ScalaModule{ + def scalaVersion = OsJvmModule.this.scalaVersion() + def moduleDeps = Seq(OsJvmModule.this) + def scalacOptions = super.scalacOptions() ++ + Agg.when(scalaVersion().startsWith("3"))("-experimental") + } + object testSpawnExitHook2 extends JavaModule + } + + object nohometest extends ScalaTests with OsLibTestModule + } + + /*object native extends Cross[OsNativeModule](scalaVersions) + trait OsNativeModule extends OsModule with ScalaNativeModule { + def scalaNativeVersion = "0.5.2" + object test extends ScalaNativeTests with OsLibTestModule { + def nativeLinkStubs = true + } + object nohometest extends ScalaNativeTests with OsLibTestModule + }*/ + + object watch extends Module { + object jvm extends Cross[WatchJvmModule](scalaVersions) + trait WatchJvmModule extends OsLibModule { + def artifactName = "os-lib-watch" + def moduleDeps = super.moduleDeps ++ Seq(os.jvm()) + def ivyDeps = Agg(Deps.jna) + object test extends ScalaTests with OsLibTestModule { + def moduleDeps = super.moduleDeps ++ Seq(os.jvm().test) + + } + } + } +} diff --git a/build.sc b/build.sc index 0eeee910..934f9434 100644 --- a/build.sc +++ b/build.sc @@ -13,10 +13,9 @@ val communityBuildDottyVersion = sys.props.get("dottyVersion").toList val scala213Version = "2.13.10" val scalaVersions = Seq( - "3.1.3", + "3.3.1", "2.12.17", scala213Version, - "2.11.12" ) ++ communityBuildDottyVersion object Deps { @@ -69,6 +68,8 @@ trait OsLibModule with SafeDeps with PlatformScalaModule { outer => + def scalacPluginIvyDeps = super.scalacPluginIvyDeps() ++ Agg(ivy"com.lihaoyi::unroll-plugin:0.1.12") + def ivyDeps = super.ivyDeps() ++ Agg(ivy"com.lihaoyi::unroll-annotation:0.1.12") def publishVersion = VcsVersion.vcsState().format() def pomSettings = PomSettings( description = artifactName(), @@ -99,7 +100,7 @@ trait OsLibModule } trait OsModule extends OsLibModule { outer => - def ivyDeps = Agg(Deps.geny) + def ivyDeps = super.ivyDeps() ++ Agg(Deps.geny) def artifactName = "os-lib" @@ -131,7 +132,7 @@ object os extends Module { object native extends Cross[OsNativeModule](scalaVersions) trait OsNativeModule extends OsModule with ScalaNativeModule { - def scalaNativeVersion = "0.4.5" + def scalaNativeVersion = "0.4.14" object test extends ScalaNativeTests with OsLibTestModule { def nativeLinkStubs = true } diff --git a/mill b/mill index cb1ee32f..d03a045c 100755 --- a/mill +++ b/mill @@ -7,7 +7,7 @@ set -e if [ -z "${DEFAULT_MILL_VERSION}" ] ; then - DEFAULT_MILL_VERSION=0.11.0 + DEFAULT_MILL_VERSION=0.11.12 fi if [ -z "$MILL_VERSION" ] ; then @@ -53,7 +53,9 @@ if [ -z "$MILL_MAIN_CLI" ] ; then fi MILL_FIRST_ARG="" -if [ "$1" = "--bsp" ] || [ "$1" = "-i" ] || [ "$1" = "--interactive" ] || [ "$1" = "--no-server" ] || [ "$1" = "--repl" ] || [ "$1" 
= "--help" ] ; then + + # first arg is a long flag for "--interactive" or starts with "-i" +if [ "$1" = "--bsp" ] || [ "${1#"-i"}" != "$1" ] || [ "$1" = "--interactive" ] || [ "$1" = "--no-server" ] || [ "$1" = "--repl" ] || [ "$1" = "--help" ] ; then # Need to preserve the first position of those listed options MILL_FIRST_ARG=$1 shift diff --git a/os/src-2/Macros.scala b/os/src-2/Macros.scala new file mode 100644 index 00000000..00ea1b96 --- /dev/null +++ b/os/src-2/Macros.scala @@ -0,0 +1,83 @@ +package os + +import os.PathChunk.segmentsFromStringLiteralValidation + +import scala.language.experimental.macros +import scala.reflect.macros.blackbox +import acyclic.skipped + +// StringPathChunkConversion is a fallback to non-macro String => PathChunk implicit conversion in case eta expansion is needed, this is required for ArrayPathChunk and SeqPathChunk +trait PathChunkMacros extends StringPathChunkConversion { + implicit def stringPathChunkValidated(s: String): PathChunk = + macro Macros.stringPathChunkValidatedImpl +} +trait SubPathMacros extends StringPathChunkConversion { + implicit def stringSubPathValidated(s: String): SubPath = + macro Macros.stringSubPathValidatedImpl +} +trait RelPathMacros extends StringPathChunkConversion { + implicit def stringRelPathValidated(s: String): RelPath = + macro Macros.stringRelPathValidatedImpl +} +trait PathMacros extends StringPathChunkConversion { + implicit def stringPathValidated(s: String): Path = + macro Macros.stringPathValidatedImpl +} + +object Macros { + + def stringPathChunkValidatedImpl(c: blackbox.Context)(s: c.Expr[String]): c.Expr[PathChunk] = { + import c.universe.{Try => _, _} + + s match { + case Expr(Literal(Constant(literal: String))) => + val stringSegments = segmentsFromStringLiteralValidation(literal) + + c.Expr( + q"""new _root_.os.PathChunk.RelPathChunk(_root_.os.RelPath.fromStringSegments($stringSegments))""" + ) + case nonLiteral => + c.Expr( + q"new _root_.os.PathChunk.StringPathChunk($nonLiteral)" + ) + } + } + def stringSubPathValidatedImpl(c: blackbox.Context)(s: c.Expr[String]): c.Expr[SubPath] = { + import c.universe.{Try => _, _} + + s match { + case Expr(Literal(Constant(literal: String))) if !literal.startsWith("/") => + val stringSegments = segmentsFromStringLiteralValidation(literal) + + if (stringSegments.startsWith(Seq(".."))) { + c.abort(s.tree.pos, "Invalid subpath literal: " + s.tree) + } + c.Expr(q"""os.sub / _root_.os.RelPath.fromStringSegments($stringSegments)""") + + case _ => c.abort(s.tree.pos, "Invalid subpath literal: " + s.tree) + } + } + def stringRelPathValidatedImpl(c: blackbox.Context)(s: c.Expr[String]): c.Expr[RelPath] = { + import c.universe.{Try => _, _} + + s match { + case Expr(Literal(Constant(literal: String))) if !literal.startsWith("/") => + val stringSegments = segmentsFromStringLiteralValidation(literal) + c.Expr(q"""os.rel / _root_.os.RelPath.fromStringSegments($stringSegments)""") + + case _ => c.abort(s.tree.pos, "Invalid relative path literal: " + s.tree) + } + } + def stringPathValidatedImpl(c: blackbox.Context)(s: c.Expr[String]): c.Expr[Path] = { + import c.universe.{Try => _, _} + + s match { + case Expr(Literal(Constant(literal: String))) if literal.startsWith("/") => + val stringSegments = segmentsFromStringLiteralValidation(literal.stripPrefix("/")) + + c.Expr(q"""os.root / _root_.os.RelPath.fromStringSegments($stringSegments)""") + + case _ => c.abort(s.tree.pos, "Invalid absolute path literal: " + s.tree) + } + } +} diff --git a/os/src-3/Macros.scala 
b/os/src-3/Macros.scala new file mode 100644 index 00000000..3467f247 --- /dev/null +++ b/os/src-3/Macros.scala @@ -0,0 +1,87 @@ +package os + +import os.PathChunk.{RelPathChunk, StringPathChunk, segmentsFromString, segmentsFromStringLiteralValidation} +import os.RelPath.fromStringSegments + +import scala.quoted.{Expr, Quotes} +import acyclic.skipped + +// StringPathChunkConversion is a fallback to non-macro String => PathChunk implicit conversion in case eta expansion is needed, this is required for ArrayPathChunk and SeqPathChunk +trait PathChunkMacros extends StringPathChunkConversion { + inline implicit def stringPathChunkValidated(s: String): PathChunk = + ${ + Macros.stringPathChunkValidatedImpl('s) + } +} +trait SubPathMacros extends StringPathChunkConversion { + inline implicit def stringSubPathValidated(s: String): SubPath = + ${ + Macros.stringSubPathValidatedImpl('s) + } +} +trait RelPathMacros extends StringPathChunkConversion { + inline implicit def stringRelPathValidated(s: String): RelPath = + ${ + Macros.stringRelPathValidatedImpl('s) + } +} +trait PathMacros extends StringPathChunkConversion { + inline implicit def stringPathValidated(s: String): Path = + ${ + Macros.stringPathValidatedImpl('s) + } +} + +object Macros { + def stringPathChunkValidatedImpl(s: Expr[String])(using quotes: Quotes): Expr[PathChunk] = { + import quotes.reflect.* + + s.asTerm match { + case Inlined(_, _, Literal(StringConstant(literal))) => + val segments = segmentsFromStringLiteralValidation(literal) + '{ + new RelPathChunk(fromStringSegments(${Expr(segments)})) + } + case _ => + '{ + { + new StringPathChunk($s) + } + } + } + } + def stringSubPathValidatedImpl(s: Expr[String])(using quotes: Quotes): Expr[SubPath] = { + import quotes.reflect.* + + s.asTerm match { + case Inlined(_, _, Literal(StringConstant(literal))) if !literal.startsWith("/") => + val stringSegments = segmentsFromStringLiteralValidation(literal) + if (stringSegments.startsWith(Seq(".."))) { + report.errorAndAbort("Invalid subpath literal: " + s.show) + } + '{ os.sub / fromStringSegments(${Expr(stringSegments)}) } + case _ => report.errorAndAbort("Invalid subpath literal: " + s.show) + + } + } + def stringRelPathValidatedImpl(s: Expr[String])(using quotes: Quotes): Expr[RelPath] = { + import quotes.reflect.* + + s.asTerm match { + case Inlined(_, _, Literal(StringConstant(literal))) if !literal.startsWith("/") => + val segments = segmentsFromStringLiteralValidation(literal) + '{ fromStringSegments(${Expr(segments)}) } + case _ => report.errorAndAbort("Invalid relative path literal: " + s.show) + } + } + def stringPathValidatedImpl(s: Expr[String])(using quotes: Quotes): Expr[Path] = { + import quotes.reflect.* + + s.asTerm match { + case Inlined(_, _, Literal(StringConstant(literal))) if literal.startsWith("/") => + val segments = segmentsFromStringLiteralValidation(literal.stripPrefix("/")) + '{ os.root / fromStringSegments(${Expr(segments)}) } + case _ => report.errorAndAbort("Invalid absolute path literal: " + s.show) + } + } +} diff --git a/os/src-3/acyclic.scala b/os/src-3/acyclic.scala new file mode 100644 index 00000000..4ae8a1d5 --- /dev/null +++ b/os/src-3/acyclic.scala @@ -0,0 +1,6 @@ +package os +private[os] object acyclic { + + /** Mocks [[\\import acyclic.skipped]] for scala 3 */ + private[os] type skipped +} diff --git a/os/src-jvm/ResourceApi.scala b/os/src-jvm/ResourceApi.scala new file mode 100644 index 00000000..e6989dfe --- /dev/null +++ b/os/src-jvm/ResourceApi.scala @@ -0,0 +1,7 @@ +package os +trait 
ResourceApi { + def resource(implicit resRoot: ResourceRoot = Thread.currentThread().getContextClassLoader) = { + os.ResourcePath.resource(resRoot) + } + +} diff --git a/os/src-native/ResourceApi.scala b/os/src-native/ResourceApi.scala new file mode 100644 index 00000000..e2e9ffbc --- /dev/null +++ b/os/src-native/ResourceApi.scala @@ -0,0 +1,2 @@ +package os +trait ResourceApi diff --git a/os/src/FileOps.scala b/os/src/FileOps.scala index e07f1043..2805a53a 100644 --- a/os/src/FileOps.scala +++ b/os/src/FileOps.scala @@ -11,6 +11,7 @@ import java.nio.file.{Path => _, _} import java.nio.file.attribute.{FileAttribute, PosixFilePermission, PosixFilePermissions} import scala.util.Try +import scala.annotation.unroll /** * Create a single directory at the specified path. Optionally takes in a @@ -22,8 +23,12 @@ import scala.util.Try * ignore the destination if it already exists, using [[os.makeDir.all]] */ object makeDir extends Function1[Path, Unit] { - def apply(path: Path): Unit = Files.createDirectory(path.wrapped) + def apply(path: Path): Unit = { + checker.value.onWrite(path) + Files.createDirectory(path.wrapped) + } def apply(path: Path, perms: PermSet): Unit = { + checker.value.onWrite(path) Files.createDirectory( path.wrapped, PosixFilePermissions.asFileAttribute(perms.toSet()) @@ -38,6 +43,7 @@ object makeDir extends Function1[Path, Unit] { object all extends Function1[Path, Unit] { def apply(path: Path): Unit = apply(path, null, true) def apply(path: Path, perms: PermSet = null, acceptLinkedDirectory: Boolean = true): Unit = { + checker.value.onWrite(path) // We special case calling makeDir.all on a symlink to a directory; // normally createDirectories blows up noisily, when really what most // people would want is for it to succeed since there is a (linked) @@ -68,7 +74,7 @@ object move { def isDefinedAt(x: Path) = partialFunction.isDefinedAt(x) def apply(from: Path) = { val dest = partialFunction(from) - makeDir.all(dest / up) + if (dest.segmentCount != 0) makeDir.all(dest / up) os.move(from, dest, replaceExisting, atomicMove, createFolders) } } @@ -84,7 +90,9 @@ object move { atomicMove: Boolean = false, createFolders: Boolean = false ): Unit = { - if (createFolders) makeDir.all(to / up) + checker.value.onWrite(from) + checker.value.onWrite(to) + if (createFolders && to.segmentCount != 0) makeDir.all(to / up) val opts1 = if (replaceExisting) Array[CopyOption](StandardCopyOption.REPLACE_EXISTING) else Array[CopyOption]() @@ -149,7 +157,7 @@ object copy { def isDefinedAt(x: Path) = partialFunction.isDefinedAt(x) def apply(from: Path) = { val dest = partialFunction(from) - makeDir.all(dest / up) + if (dest.segmentCount != 0) makeDir.all(dest / up) os.copy( from, dest, @@ -174,9 +182,11 @@ object copy { replaceExisting: Boolean = false, copyAttributes: Boolean = false, createFolders: Boolean = false, - mergeFolders: Boolean = false + @unroll mergeFolders: Boolean = false ): Unit = { - if (createFolders) makeDir.all(to / up) + checker.value.onRead(from) + checker.value.onWrite(to) + if (createFolders && to.segmentCount != 0) makeDir.all(to / up) val opts1 = if (followLinks) Array[CopyOption]() else Array[CopyOption](LinkOption.NOFOLLOW_LINKS) @@ -191,43 +201,19 @@ object copy { s"Can't copy a directory into itself: $to is inside $from" ) - def copyOne(p: Path): file.Path = { + def copyOne(p: Path): Unit = { val target = to / p.relativeTo(from) if (mergeFolders && isDir(p, followLinks) && isDir(target, followLinks)) { // nothing to do - target.wrapped } else { Files.copy(p.wrapped, 
target.wrapped, opts1 ++ opts2 ++ opts3: _*) } } copyOne(from) - if (stat(from, followLinks = followLinks).isDir) walk(from).map(copyOne) + if (stat(from, followLinks = followLinks).isDir) for (p <- walk(from)) copyOne(p) } - /** This overload is only to keep binary compatibility with older os-lib versions. */ - @deprecated( - "Use os.copy(from, to, followLinks, replaceExisting, copyAttributes, " + - "createFolders, mergeFolders) instead", - "os-lib 0.7.5" - ) - def apply( - from: Path, - to: Path, - followLinks: Boolean, - replaceExisting: Boolean, - copyAttributes: Boolean, - createFolders: Boolean - ): Unit = apply( - from = from, - to = to, - followLinks = followLinks, - replaceExisting = replaceExisting, - copyAttributes = copyAttributes, - createFolders = createFolders, - mergeFolders = false - ) - /** * Copy a file into a particular folder, rather * than into a particular path @@ -240,7 +226,7 @@ object copy { replaceExisting: Boolean = false, copyAttributes: Boolean = false, createFolders: Boolean = false, - mergeFolders: Boolean = false + @unroll mergeFolders: Boolean = false ): Unit = { os.copy( from, @@ -252,29 +238,6 @@ object copy { mergeFolders ) } - - /** This overload is only to keep binary compatibility with older os-lib versions. */ - @deprecated( - "Use os.copy.into(from, to, followLinks, replaceExisting, copyAttributes, " + - "createFolders, mergeFolders) instead", - "os-lib 0.7.5" - ) - def apply( - from: Path, - to: Path, - followLinks: Boolean, - replaceExisting: Boolean, - copyAttributes: Boolean, - createFolders: Boolean - ): Unit = apply( - from = from, - to = to, - followLinks = followLinks, - replaceExisting = replaceExisting, - copyAttributes = copyAttributes, - createFolders = createFolders, - mergeFolders = false - ) } /** @@ -311,6 +274,7 @@ object copy { object remove extends Function1[Path, Boolean] { def apply(target: Path): Boolean = apply(target, false) def apply(target: Path, checkExists: Boolean = false): Boolean = { + checker.value.onWrite(target) if (checkExists) { Files.delete(target.wrapped) true @@ -320,15 +284,21 @@ object remove extends Function1[Path, Boolean] { } object all extends Function1[Path, Unit] { - def apply(target: Path) = { + def apply(target: Path): Unit = apply(target, ignoreErrors = false) + def apply(target: Path, ignoreErrors: Boolean = false): Unit = { require(target.segmentCount != 0, s"Cannot remove a root directory: $target") + checker.value.onWrite(target) val nioTarget = target.wrapped if (Files.exists(nioTarget, LinkOption.NOFOLLOW_LINKS)) { if (Files.isDirectory(nioTarget, LinkOption.NOFOLLOW_LINKS)) { - walk.stream(target, preOrder = false).foreach(remove(_)) + for (p <- walk.stream(target, preOrder = false)) { + try remove(p) + catch { case e: Throwable if ignoreErrors => /*ignore*/ } + } } - Files.delete(nioTarget) + try Files.delete(nioTarget) + catch { case e: Throwable if ignoreErrors => /*ignore*/ } } } } @@ -350,6 +320,8 @@ object exists extends Function1[Path, Boolean] { */ object hardlink { def apply(link: Path, dest: Path) = { + checker.value.onWrite(link) + checker.value.onRead(dest) Files.createLink(link.wrapped, dest.wrapped) } } @@ -359,6 +331,12 @@ object hardlink { */ object symlink { def apply(link: Path, dest: FilePath, perms: PermSet = null): Unit = { + checker.value.onWrite(link) + checker.value.onRead(dest match { + case p: RelPath => link / RelPath.up / p + case p: SubPath => link / RelPath.up / p + case p: Path => p + }) val permArray: Array[FileAttribute[_]] = if (perms == null) 
Array[FileAttribute[_]]() else Array(PosixFilePermissions.asFileAttribute(perms.toSet())) diff --git a/os/src/Internals.scala b/os/src/Internals.scala index 3e8564bd..a0cebf6e 100644 --- a/os/src/Internals.scala +++ b/os/src/Internals.scala @@ -7,18 +7,23 @@ object Internals { val emptyStringArray = Array.empty[String] - def transfer0(src: InputStream, sink: (Array[Byte], Int) => Unit) = { + def transfer0(src: InputStream, sink: (Array[Byte], Int) => Unit): Unit = { + transfer0(src, sink, true) + } + def transfer0(src: InputStream, sink: (Array[Byte], Int) => Unit, close: Boolean = true): Unit = { val buffer = new Array[Byte](8192) var r = 0 while (r != -1) { r = src.read(buffer) if (r != -1) sink(buffer, r) } - src.close() + if (close) src.close() } - def transfer(src: InputStream, dest: OutputStream) = transfer0( + def transfer(src: InputStream, dest: OutputStream): Unit = transfer(src, dest, true) + def transfer(src: InputStream, dest: OutputStream, close: Boolean = true): Unit = transfer0( src, - dest.write(_, 0, _) + dest.write(_, 0, _), + close ) } diff --git a/os/src/Model.scala b/os/src/Model.scala index 2d9fb45e..f5095f5a 100644 --- a/os/src/Model.scala +++ b/os/src/Model.scala @@ -215,7 +215,7 @@ case class SubprocessException(result: CommandResult) extends Exception(result.t * be "interpolated" directly into a subprocess call. */ case class Shellable(value: Seq[String]) -object Shellable { +object Shellable extends os.GeneratedTupleConversions[Shellable] { implicit def StringShellable(s: String): Shellable = Shellable(Seq(s)) implicit def CharSequenceShellable(cs: CharSequence): Shellable = Shellable(Seq(cs.toString)) @@ -232,6 +232,8 @@ object Shellable { implicit def ArrayShellable[T](s: Array[T])(implicit f: T => Shellable): Shellable = Shellable(s.toIndexedSeq.flatMap(f(_).value)) + + protected def flatten(vs: Shellable*): Shellable = IterableShellable(vs) } /** @@ -281,3 +283,30 @@ object PosixStatInfo { ) } } + +/** + * Defines hooks for path based operations. + * + * This, in conjunction with [[checker]], can be used to implement custom checks like + * - restricting an operation to some path(s) + * - logging an operation + */ +@experimental +trait Checker { + + /** A hook for a read operation on `path`. */ + def onRead(path: ReadablePath): Unit + + /** A hook for a write operation on `path`. */ + def onWrite(path: Path): Unit +} + +@experimental +object Checker { + + /** A no-op [[Checker]]. 
*/ + object Nop extends Checker { + def onRead(path: ReadablePath): Unit = () + def onWrite(path: Path): Unit = () + } +} diff --git a/os/src/Path.scala b/os/src/Path.scala index d9fb0554..a7e559d5 100644 --- a/os/src/Path.scala +++ b/os/src/Path.scala @@ -2,32 +2,83 @@ package os import java.net.URI import java.nio.file.Paths - import collection.JavaConverters._ import scala.language.implicitConversions -import java.nio.file +import acyclic.skipped +import os.PathError.{InvalidSegment, NonCanonicalLiteral} + +import scala.util.{DynamicVariable, Try} //needed for cross-version defined macros trait PathChunk { def segments: Seq[String] def ups: Int } -object PathChunk { +trait StringPathChunkConversion { + + implicit def stringToPathChunk(s: String): PathChunk = + new PathChunk.StringPathChunkInternal(s) +} + +object PathChunk extends PathChunkMacros { + def segmentsFromString(s: String): Array[String] = { + val trailingSeparatorsCount = s.reverseIterator.takeWhile(_ == '/').length + val strNoTrailingSeps = s.dropRight(trailingSeparatorsCount) + val splitted = strNoTrailingSeps.split('/') + splitted ++ Array.fill(trailingSeparatorsCount)("") + } + private def reduceUps(in: Array[String]): List[String] = + in.foldLeft(List.empty[String]) { case (acc, x) => + acc match { + case h :: t if h == ".." => x :: acc + case h :: t if x == ".." => t + case _ => x :: acc + } + }.reverse + private[os] def segmentsFromStringLiteralValidation(literal: String): Array[String] = { + val stringSegments = segmentsFromString(literal) + val validSegmnts = reduceUps(validLiteralSegments(stringSegments)) + val sanitizedLiteral = validSegmnts.mkString("/") + if (validSegmnts.isEmpty) throw InvalidSegment( + literal, + s"Literal path sequence [$literal] doesn't affect path being formed, please remove it" + ) + if (literal != sanitizedLiteral) throw NonCanonicalLiteral(literal, sanitizedLiteral) + stringSegments + } + private def validLiteralSegments(segments: Array[String]): Array[String] = { + val AllowedLiteralSegment = ".." + segments.collect { + case AllowedLiteralSegment => AllowedLiteralSegment + case segment if Try(BasePath.checkSegment(segment)).isSuccess => segment + } + } + implicit class RelPathChunk(r: RelPath) extends PathChunk { def segments = r.segments def ups = r.ups override def toString() = r.toString } + implicit class SubPathChunk(r: SubPath) extends PathChunk { def segments = r.segments def ups = 0 override def toString() = r.toString } - implicit class StringPathChunk(s: String) extends PathChunk { + + // Implicit String => PathChunk conversion used inside os-lib, prevents macro expansion in same compilation unit + private[os] implicit class StringPathChunkInternal(s: String) extends PathChunk { BasePath.checkSegment(s) def segments = Seq(s) def ups = 0 override def toString() = s } + + // binary compatibility shim + class StringPathChunk(s: String) extends StringPathChunkInternal(s) + + // binary compatibility shim + def StringPathChunk(s: String): StringPathChunk = new StringPathChunk(s) + implicit class SymbolPathChunk(s: Symbol) extends PathChunk { BasePath.checkSegment(s.name) def segments = Seq(s.name) @@ -127,34 +178,31 @@ object BasePath { def checkSegment(s: String) = { def fail(msg: String) = throw PathError.InvalidSegment(s, msg) def considerStr = - "use the Path(...) or RelPath(...) constructor calls to convert them. " + "If you are dealing with dynamic path-strings coming from external sources, " + + "use the Path(...)/RelPath(...)/SubPath(...) constructor calls to convert them." 
s.indexOf('/') match { case -1 => // do nothing case c => fail( - s"[/] is not a valid character to appear in a path segment. " + - "If you want to parse an absolute or relative path that may have " + - "multiple segments, e.g. path-strings coming from external sources " + + s"[/] is not a valid character to appear in a non-literal path segment. " + considerStr ) } - def externalStr = "If you are dealing with path-strings coming from external sources, " s match { case "" => fail( - "OS-Lib does not allow empty path segments " + - externalStr + considerStr + "OS-Lib does not allow empty path segments. " + + considerStr ) case "." => fail( - "OS-Lib does not allow [.] as a path segment " + - externalStr + considerStr + "OS-Lib does not allow [.] in a non-literal path segment. " + + considerStr ) case ".." => fail( - "OS-Lib does not allow [..] as a path segment " + - externalStr + + "OS-Lib does not allow [..] in a non-literal path segment. " + considerStr + "If you want to use the `..` segment manually to represent going up " + "one level in the path, use the `up` segment from `os.up` " + @@ -227,6 +275,11 @@ object PathError { case class LastOnEmptyPath() extends IAE("empty path has no last segment") + + case class NonCanonicalLiteral(providedLiteral: String, sanitizedLiteral: String) + extends IAE( + s"Literal path sequence [$providedLiteral] used in OS-Lib must be in a canonical form, please use [$sanitizedLiteral] instead" + ) } /** @@ -296,7 +349,8 @@ class RelPath private[os] (segments0: Array[String], val ups: Int) def resolveFrom(base: os.Path) = base / this } -object RelPath { +object RelPath extends RelPathMacros { + def apply[T: PathConvertible](f0: T): RelPath = { val f = implicitly[PathConvertible[T]].apply(f0) @@ -319,6 +373,10 @@ object RelPath { val up: RelPath = new RelPath(Internals.emptyStringArray, 1) val rel: RelPath = new RelPath(Internals.emptyStringArray, 0) implicit def SubRelPath(p: SubPath): RelPath = new RelPath(p.segments0, 0) + def fromStringSegments(segments: Array[String]): RelPath = segments.foldLeft(RelPath.rel) { + case (agg, "..") => agg / up + case (agg, seg) => agg / seg + } } /** @@ -352,7 +410,7 @@ class SubPath private[os] (val segments0: Array[String]) def resolveFrom(base: os.Path) = base / this } -object SubPath { +object SubPath extends SubPathMacros { private[os] def relativeTo0(segments0: Array[String], segments: IndexedSeq[String]): RelPath = { val commonPrefix = { @@ -379,7 +437,32 @@ object SubPath { val sub: SubPath = new SubPath(Internals.emptyStringArray) } -object Path { +object Path extends PathMacros { + @experimental trait Serializer { + def serializeString(p: os.Path): String + def serializeFile(p: os.Path): java.io.File + def serializePath(p: os.Path): java.nio.file.Path + def deserialize(s: String): java.nio.file.Path + def deserialize(s: java.io.File): java.nio.file.Path + def deserialize(s: java.nio.file.Path): java.nio.file.Path + def deserialize(s: java.net.URI): java.nio.file.Path + } + @experimental val pathSerializer = new DynamicVariable[Serializer](defaultPathSerializer) + @experimental object defaultPathSerializer extends Serializer { + def serializeString(p: os.Path): String = p.wrapped.toString + def serializeFile(p: os.Path): java.io.File = p.wrapped.toFile + def serializePath(p: os.Path): java.nio.file.Path = p.wrapped + def deserialize(s: String) = Paths.get(s) + def deserialize(s: java.io.File) = Paths.get(s.getPath) + def deserialize(s: java.nio.file.Path) = s + def deserialize(s: java.net.URI) = s.getScheme() 
match { + case "file" => Paths.get(s) + case uriType => + throw new IllegalArgumentException( + s"""os.Path can only be created from a "file" URI scheme, but found "${uriType}"""" + ) + } + } def apply(p: FilePath, base: Path) = p match { case p: RelPath => base / p case p: SubPath => base / p @@ -394,8 +477,8 @@ object Path { val f = implicitly[PathConvertible[T]].apply(f0) if (f.subpath(0, 1).toString != "~") if (base == null) Path(f0) else Path(f0, base) else { - Path(System.getProperty("user.home"))(PathConvertible.StringConvertible) / - RelPath(f.subpath(0, 1).relativize(f))(PathConvertible.NioPathConvertible) + Path(System.getProperty("user.home"))(using PathConvertible.StringConvertible) / + RelPath(f.subpath(0, 1).relativize(f))(using PathConvertible.NioPathConvertible) } } @@ -473,6 +556,7 @@ object Path { trait ReadablePath { def toSource: os.Source + def getInputStream: java.io.InputStream } @@ -482,8 +566,10 @@ trait ReadablePath { */ class Path private[os] (val wrapped: java.nio.file.Path) extends FilePath with ReadablePath with BasePathImpl { - def toSource: SeekableSource = - new SeekableSource.ChannelSource(java.nio.file.Files.newByteChannel(wrapped)) + def toSource: SeekableSource = new SeekableSource.ChannelLengthSource( + java.nio.file.Files.newByteChannel(wrapped), + java.nio.file.Files.size(wrapped) + ) require(wrapped.isAbsolute || Path.driveRelative(wrapped), s"$wrapped is not an absolute path") def root = Option(wrapped.getRoot).map(_.toString).getOrElse("") @@ -501,7 +587,7 @@ class Path private[os] (val wrapped: java.nio.file.Path) val resolved = wrapped.resolve(chunk.toString).normalize() new Path(resolved) } - override def toString = wrapped.toString + override def toString = Path.pathSerializer.value.serializeString(this) override def equals(o: Any): Boolean = o match { case p: Path => wrapped.equals(p.wrapped) @@ -532,8 +618,8 @@ class Path private[os] (val wrapped: java.nio.file.Path) new RelPath(segments.drop(nonUpIndex), nonUpIndex) } - def toIO: java.io.File = wrapped.toFile - def toNIO: java.nio.file.Path = wrapped + def toIO: java.io.File = Path.pathSerializer.value.serializeFile(this) + def toNIO: java.nio.file.Path = Path.pathSerializer.value.serializePath(this) def resolveFrom(base: os.Path) = this @@ -547,23 +633,18 @@ sealed trait PathConvertible[T] { object PathConvertible { implicit object StringConvertible extends PathConvertible[String] { - def apply(t: String) = Paths.get(t) + def apply(t: String) = Path.pathSerializer.value.deserialize(t) } implicit object JavaIoFileConvertible extends PathConvertible[java.io.File] { - def apply(t: java.io.File) = Paths.get(t.getPath) + def apply(t: java.io.File) = Path.pathSerializer.value.deserialize(t) } implicit object NioPathConvertible extends PathConvertible[java.nio.file.Path] { - def apply(t: java.nio.file.Path) = t + def apply(t: java.nio.file.Path) = Path.pathSerializer.value.deserialize(t) override def isCustomFs(t: java.nio.file.Path): Boolean = t.getFileSystem() != java.nio.file.FileSystems.getDefault() } implicit object UriPathConvertible extends PathConvertible[URI] { - def apply(uri: URI) = uri.getScheme() match { - case "file" => Paths.get(uri) - case uriType => - throw new IllegalArgumentException( - s"""os.Path can only be created from a "file" URI scheme, but found "${uriType}"""" - ) - } + def apply(uri: URI) = Path.pathSerializer.value.deserialize(uri) + } } diff --git a/os/src/PermsOps.scala b/os/src/PermsOps.scala index 95f6bb32..73ef6d19 100644 --- a/os/src/PermsOps.scala +++ 
b/os/src/PermsOps.scala @@ -24,6 +24,7 @@ object perms extends Function1[Path, PermSet] { */ object set { def apply(p: Path, arg2: PermSet): Unit = { + checker.value.onWrite(p) Files.setPosixFilePermissions(p.wrapped, arg2.toSet()) } } @@ -44,7 +45,10 @@ object owner extends Function1[Path, UserPrincipal] { * Set the owner of the file/folder at the given path */ object set { - def apply(arg1: Path, arg2: UserPrincipal): Unit = Files.setOwner(arg1.wrapped, arg2) + def apply(arg1: Path, arg2: UserPrincipal): Unit = { + checker.value.onWrite(arg1) + Files.setOwner(arg1.wrapped, arg2) + } def apply(arg1: Path, arg2: String): Unit = { apply( arg1, @@ -73,6 +77,7 @@ object group extends Function1[Path, GroupPrincipal] { */ object set { def apply(arg1: Path, arg2: GroupPrincipal): Unit = { + checker.value.onWrite(arg1) Files.getFileAttributeView( arg1.wrapped, classOf[PosixFileAttributeView], diff --git a/os/src/ProcessOps.scala b/os/src/ProcessOps.scala new file mode 100644 index 00000000..e4a30629 --- /dev/null +++ b/os/src/ProcessOps.scala @@ -0,0 +1,660 @@ +package os + +import collection.JavaConverters._ +import java.lang.ProcessBuilder.Redirect +import os.SubProcess.InputStream +import java.io.IOException +import java.util.concurrent.LinkedBlockingQueue +import ProcessOps._ + +object call { + + /** + * @see [[os.proc.call]] + */ + def apply( + cmd: Shellable, + env: Map[String, String] = null, + // Make sure `cwd` only comes after `env`, so `os.call("foo", path)` is a compile error + // since the correct syntax is `os.call(("foo", path))` + cwd: Path = null, + stdin: ProcessInput = Pipe, + stdout: ProcessOutput = Pipe, + stderr: ProcessOutput = os.Inherit, + mergeErrIntoOut: Boolean = false, + timeout: Long = -1, + check: Boolean = true, + propagateEnv: Boolean = true, + shutdownGracePeriod: Long = 100, + destroyOnExit: Boolean = true + ): CommandResult = { + os.proc(cmd).call( + cwd = cwd, + env = env, + stdin = stdin, + stdout = stdout, + stderr = stderr, + mergeErrIntoOut = mergeErrIntoOut, + timeout = timeout, + check = check, + propagateEnv = propagateEnv, + shutdownGracePeriod = shutdownGracePeriod, + destroyOnExit = destroyOnExit + ) + } + + // Bincompat Forwarder + def apply( + cmd: Shellable, + env: Map[String, String], + // Make sure `cwd` only comes after `env`, so `os.call("foo", path)` is a compile error + // since the correct syntax is `os.call(("foo", path))` + cwd: Path, + stdin: ProcessInput, + stdout: ProcessOutput, + stderr: ProcessOutput, + mergeErrIntoOut: Boolean, + timeout: Long, + check: Boolean, + propagateEnv: Boolean, + timeoutGracePeriod: Long + ): CommandResult = { + call( + cmd = cmd, + cwd = cwd, + env = env, + stdin = stdin, + stdout = stdout, + stderr = stderr, + mergeErrIntoOut = mergeErrIntoOut, + timeout = timeout, + check = check, + propagateEnv = propagateEnv, + shutdownGracePeriod = timeoutGracePeriod, + destroyOnExit = true + ) + } +} +object spawn { + + /** + * @see [[os.proc.spawn]] + */ + def apply( + cmd: Shellable, + // Make sure `cwd` only comes after `env`, so `os.spawn("foo", path)` is a compile error + // since the correct syntax is `os.spawn(("foo", path))` + env: Map[String, String] = null, + cwd: Path = null, + stdin: ProcessInput = Pipe, + stdout: ProcessOutput = Pipe, + stderr: ProcessOutput = os.Inherit, + mergeErrIntoOut: Boolean = false, + propagateEnv: Boolean = true, + shutdownGracePeriod: Long = 100, + destroyOnExit: Boolean = true + ): SubProcess = { + os.proc(cmd).spawn( + cwd = cwd, + env = env, + stdin = stdin, + stdout = 
stdout, + stderr = stderr, + mergeErrIntoOut = mergeErrIntoOut, + propagateEnv = propagateEnv, + shutdownGracePeriod = shutdownGracePeriod, + destroyOnExit = destroyOnExit + ) + } + + // Bincompat Forwarder + def apply( + cmd: Shellable, + // Make sure `cwd` only comes after `env`, so `os.spawn("foo", path)` is a compile error + // since the correct syntax is `os.spawn(("foo", path))` + env: Map[String, String], + cwd: Path, + stdin: ProcessInput, + stdout: ProcessOutput, + stderr: ProcessOutput, + mergeErrIntoOut: Boolean, + propagateEnv: Boolean + ): SubProcess = { + spawn( + cmd = cmd, + cwd = cwd, + env = env, + stdin = stdin, + stdout = stdout, + stderr = stderr, + mergeErrIntoOut = mergeErrIntoOut, + propagateEnv = propagateEnv, + shutdownGracePeriod = 100, + destroyOnExit = true + ) + } +} + +/** + * Convenience APIs around [[java.lang.Process]] and [[java.lang.ProcessBuilder]]: + * + * - os.proc.call provides a convenient wrapper for "function-like" processes + * that you invoke with some input, whose entire output you need, but + * otherwise do not have any intricate back-and-forth communication + * + * - os.proc.stream provides a lower level API: rather than providing the output + * all at once, you pass in callbacks it invokes whenever there is a chunk of + * output received from the spawned process. + * + * - os.proc(...) provides the lowest level API: an simple Scala API around + * [[java.lang.ProcessBuilder]], that spawns a normal [[java.lang.Process]] + * for you to deal with. You can then interact with it normally through + * the standard stdin/stdout/stderr streams, using whatever protocol you + * want + */ +case class proc(command: Shellable*) { + def commandChunks: Seq[String] = command.flatMap(_.value) + + /** + * Invokes the given subprocess like a function, passing in input and returning a + * [[CommandResult]]. You can then call `result.exitCode` to see how it exited, or + * `result.out.bytes` or `result.err.text()` to access the aggregated stdout and + * stderr of the subprocess in a number of convenient ways. If a non-zero exit code + * is returned, this throws a [[os.SubprocessException]] containing the + * [[CommandResult]], unless you pass in `check = false`. + * + * If you want to spawn an interactive subprocess, such as `vim`, `less`, or a + * `python` shell, set all of `stdin`/`stdout`/`stderr` to [[os.Inherit]] + * + * `call` provides a number of parameters that let you configure how the subprocess + * is run: + * + * @param cwd the working directory of the subprocess + * @param env any additional environment variables you wish to set in the subprocess + * @param stdin any data you wish to pass to the subprocess's standard input + * @param stdout How the process's output stream is configured. + * @param stderr How the process's error stream is configured. 
+ * @param mergeErrIntoOut merges the subprocess's stderr stream into it's stdout + * @param timeout how long to wait in milliseconds for the subprocess to complete + * (-1 for no timeout) + * @param check disable this to avoid throwing an exception if the subprocess + * fails with a non-zero exit code + * @param propagateEnv disable this to avoid passing in this parent process's + * environment variables to the subprocess + * @param shutdownGracePeriod if the timeout is enabled, how long in milliseconds for the + * subprocess to gracefully terminate before attempting to + * forcibly kill it + * (-1 for no kill, 0 for always kill immediately) + * + * @note the issuing of `SIGTERM` instead of `SIGKILL` is implementation dependent on your JVM version. Pre-Java 9, no `SIGTERM` may be + * issued. Check the documentation for your JDK's `Process.destroy`. + */ + def call( + cwd: Path = null, + env: Map[String, String] = null, + stdin: ProcessInput = Pipe, + stdout: ProcessOutput = Pipe, + stderr: ProcessOutput = os.Inherit, + mergeErrIntoOut: Boolean = false, + timeout: Long = -1, + check: Boolean = true, + propagateEnv: Boolean = true, + // this cannot be next to `timeout` as this will introduce a bin-compat break (default arguments are numbered in the bytecode) + shutdownGracePeriod: Long = 100, + destroyOnExit: Boolean = true + ): CommandResult = { + + val chunks = new java.util.concurrent.ConcurrentLinkedQueue[Either[geny.Bytes, geny.Bytes]] + + val sub = spawn( + cwd, + env, + stdin, + if (stdout ne os.Pipe) stdout + else os.ProcessOutput.ReadBytes((buf, n) => + chunks.add(Left(new geny.Bytes(java.util.Arrays.copyOf(buf, n)))) + ), + if (stderr ne os.Pipe) stderr + else os.ProcessOutput.ReadBytes((buf, n) => + chunks.add(Right(new geny.Bytes(java.util.Arrays.copyOf(buf, n)))) + ), + mergeErrIntoOut, + propagateEnv, + shutdownGracePeriod = shutdownGracePeriod, + destroyOnExit = destroyOnExit + ) + + sub.join(timeout, shutdownGracePeriod) + + val chunksSeq = chunks.iterator.asScala.toIndexedSeq + val res = CommandResult(commandChunks, sub.exitCode(), chunksSeq) + if (res.exitCode == 0 || !check) res + else throw SubprocessException(res) + } + + // forwarder for the new timeoutGracePeriod flag + private[os] def call( + cwd: Path, + env: Map[String, String], + stdin: ProcessInput, + stdout: ProcessOutput, + stderr: ProcessOutput, + mergeErrIntoOut: Boolean, + timeout: Long, + check: Boolean, + propagateEnv: Boolean + ): CommandResult = call( + cwd, + env, + stdin, + stdout, + stderr, + mergeErrIntoOut, + timeout, + check, + propagateEnv, + shutdownGracePeriod = 100 + ) + + // Bincompat Forwarder + private[os] def call( + cwd: Path, + env: Map[String, String], + stdin: ProcessInput, + stdout: ProcessOutput, + stderr: ProcessOutput, + mergeErrIntoOut: Boolean, + timeout: Long, + check: Boolean, + propagateEnv: Boolean, + timeoutGracePeriod: Long + ): CommandResult = call( + cwd, + env, + stdin, + stdout, + stderr, + mergeErrIntoOut, + timeout, + check, + propagateEnv, + timeoutGracePeriod, + destroyOnExit = true + ) + + /** + * The most flexible of the [[os.proc]] calls, `os.proc.spawn` simply configures + * and starts a subprocess, and returns it as a `java.lang.Process` for you to + * interact with however you like. + * + * Note that if you provide `ProcessOutput` callbacks to `stdout`/`stderr`, + * the calls to those callbacks take place on newly spawned threads that + * execute in parallel with the main thread. Thus make sure any data + * processing you do in those callbacks is thread safe! 
+ */ + def spawn( + cwd: Path = null, + env: Map[String, String] = null, + stdin: ProcessInput = Pipe, + stdout: ProcessOutput = Pipe, + stderr: ProcessOutput = os.Inherit, + mergeErrIntoOut: Boolean = false, + propagateEnv: Boolean = true, + shutdownGracePeriod: Long = 100, + destroyOnExit: Boolean = true + ): SubProcess = { + + val cmdChunks = commandChunks + val commandStr = cmdChunks.mkString(" ") + + def resolve[T](x: T, y: T) = if (x == os.Inherit) y else x + val resolvedStdin = resolve(stdin, os.Inherit.in.value) + val resolvedStdout = resolve(stdout, os.Inherit.out.value) + val resolvedStderr = resolve(stderr, os.Inherit.err.value) + + val builder = buildProcess( + commandChunks, + cwd, + env, + resolvedStdin, + resolvedStdout, + resolvedStderr, + mergeErrIntoOut, + propagateEnv + ) + + lazy val shutdownHookThread = + if (!destroyOnExit) None + else Some(new Thread("subprocess-shutdown-hook") { + override def run(): Unit = proc.destroy(shutdownGracePeriod) + }) + + lazy val shutdownHookMonitorThread = shutdownHookThread.map(t => + new Thread("subprocess-shutdown-hook-monitor") { + override def run(): Unit = { + while (proc.wrapped.isAlive) Thread.sleep(1) + try Runtime.getRuntime().removeShutdownHook(t) + catch { case e: Throwable => /*do nothing*/ } + } + } + ) + + shutdownHookThread.foreach(Runtime.getRuntime().addShutdownHook) + + lazy val proc: SubProcess = new SubProcess( + builder.start(), + resolvedStdin.processInput(proc.stdin).map(new Thread(_, commandStr + " stdin thread")), + resolvedStdout.processOutput(proc.stdout).map(new Thread(_, commandStr + " stdout thread")), + resolvedStderr.processOutput(proc.stderr).map(new Thread(_, commandStr + " stderr thread")), + shutdownGracePeriod = shutdownGracePeriod, + shutdownHookMonitorThread = shutdownHookMonitorThread + ) + + shutdownHookMonitorThread.foreach(_.start()) + + proc.inputPumperThread.foreach(_.start()) + proc.outputPumperThread.foreach(_.start()) + proc.errorPumperThread.foreach(_.start()) + proc + } + + // Bincompat Forwarder + def spawn( + cwd: Path, + env: Map[String, String], + stdin: ProcessInput, + stdout: ProcessOutput, + stderr: ProcessOutput, + mergeErrIntoOut: Boolean, + propagateEnv: Boolean + ): SubProcess = spawn( + cwd = cwd, + env = env, + stdin = stdin, + stdout = stdout, + stderr = stderr, + mergeErrIntoOut = mergeErrIntoOut, + propagateEnv = propagateEnv, + shutdownGracePeriod = 100, + destroyOnExit = true + ) + + /** + * Pipes the output of this process into the input of the [[next]] process. Returns a + * [[ProcGroup]] containing both processes, which you can then either execute or + * pipe further. + */ + def pipeTo(next: proc): ProcGroup = ProcGroup(Seq(this, next)) +} + +/** + * A group of processes that are piped together, corresponding to e.g. `ls -l | grep .scala`. + * You can create a `ProcGroup` by calling `.pipeTo` on a [[proc]] multiple times. + * Contains methods corresponding to the methods on [[proc]], but defined for pipelines + * of processes. + */ +case class ProcGroup private[os] (commands: Seq[proc]) { + assert(commands.size >= 2) + + private lazy val isWindows = sys.props("os.name").toLowerCase().contains("windows") + + /** + * Invokes the given pipeline like a function, passing in input and returning a + * [[CommandResult]]. You can then call `result.exitCode` to see how it exited, or + * `result.out.bytes` or `result.err.string` to access the aggregated stdout and + * stderr of the subprocess in a number of convenient ways. 
If a non-zero exit code
+ * is returned, this throws a [[os.SubprocessException]] containing the
+ * [[CommandResult]], unless you pass in `check = false`.
+ *
+ * For each process in the pipeline, the output will be forwarded to the input of the next
+ * process. The input of the first process is set to the provided [[stdin]]. The output of the last
+ * process will be returned as the output of the pipeline. [[stderr]] is set for all processes.
+ *
+ * `call` provides a number of parameters that let you configure how the pipeline
+ * is run:
+ *
+ * @param cwd the working directory of the pipeline
+ * @param env any additional environment variables you wish to set in the pipeline
+ * @param stdin any data you wish to pass to the pipeline's standard input (to the first process)
+ * @param stdout How the pipeline's output stream is configured (the stdout of the last process)
+ * @param stderr How the process's error stream is configured (set for all processes)
+ * @param mergeErrIntoOut merges the pipeline's stderr stream into its stdout. Note that the
+ * stderr will then be forwarded with stdout to subsequent processes in the pipeline.
+ * @param timeout how long to wait in milliseconds for the pipeline to complete
+ * @param check disable this to avoid throwing an exception if the pipeline
+ * fails with a non-zero exit code
+ * @param propagateEnv disable this to avoid passing in this parent process's
+ * environment variables to the pipeline
+ * @param pipefail if true, the pipeline's exitCode will be the exit code of the first
+ * failing process. If no process fails, the exit code will be 0.
+ * @param handleBrokenPipe if true, every [[java.io.IOException]] when redirecting output of a process
+ * will be caught and handled by killing the writing process. This behaviour
+ * is consistent with handlers of SIGPIPE signals in most programs
+ * supporting interruptible piping. Disabled by default on Windows.
+ * @param shutdownGracePeriod if the timeout is enabled, how long in milliseconds for the
+ * subprocess to gracefully terminate before attempting to
+ * forcibly kill it
+ * (-1 for no kill, 0 for always kill immediately)
+ *
+ * @note the issuing of `SIGTERM` instead of `SIGKILL` is implementation dependent on your JVM version. Pre-Java 9, no `SIGTERM` may be
+ * issued. Check the documentation for your JDK's `Process.destroy`.
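+ *
+ * A sketch of a shell-style `ls -l | grep .scala` pipeline (this assumes both
+ * executables are available on the `PATH`):
+ *
+ * {{{
+ * val res = os.proc("ls", "-l").pipeTo(os.proc("grep", ".scala")).call()
+ * val listing = res.out.lines()
+ * }}}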
+ */ + def call( + cwd: Path = null, + env: Map[String, String] = null, + stdin: ProcessInput = Pipe, + stdout: ProcessOutput = Pipe, + stderr: ProcessOutput = os.Inherit, + mergeErrIntoOut: Boolean = false, + timeout: Long = -1, + check: Boolean = true, + propagateEnv: Boolean = true, + pipefail: Boolean = true, + handleBrokenPipe: Boolean = !isWindows, + // this cannot be next to `timeout` as this will introduce a bin-compat break (default arguments are numbered in the bytecode) + shutdownGracePeriod: Long = 100 + ): CommandResult = { + val chunks = new java.util.concurrent.ConcurrentLinkedQueue[Either[geny.Bytes, geny.Bytes]] + + val sub = spawn( + cwd, + env, + stdin, + if (stdout ne os.Pipe) stdout + else os.ProcessOutput.ReadBytes((buf, n) => + chunks.add(Left(new geny.Bytes(java.util.Arrays.copyOf(buf, n)))) + ), + if (stderr ne os.Pipe) stderr + else os.ProcessOutput.ReadBytes((buf, n) => + chunks.add(Right(new geny.Bytes(java.util.Arrays.copyOf(buf, n)))) + ), + mergeErrIntoOut, + propagateEnv, + pipefail + ) + + sub.join(timeout, shutdownGracePeriod) + + val chunksSeq = chunks.iterator.asScala.toIndexedSeq + val res = + CommandResult(commands.flatMap(_.commandChunks :+ "|").init, sub.exitCode(), chunksSeq) + if (res.exitCode == 0 || !check) res + else throw SubprocessException(res) + } + + private[os] def call( + cwd: Path, + env: Map[String, String], + stdin: ProcessInput, + stdout: ProcessOutput, + stderr: ProcessOutput, + mergeErrIntoOut: Boolean, + timeout: Long, + check: Boolean, + propagateEnv: Boolean, + pipefail: Boolean, + handleBrokenPipe: Boolean + ): CommandResult = call( + cwd, + env, + stdin, + stdout, + stderr, + mergeErrIntoOut, + timeout, + check, + propagateEnv, + pipefail, + handleBrokenPipe, + shutdownGracePeriod = 100 + ) + + /** + * The most flexible of the [[os.ProcGroup]] calls. It sets-up a pipeline of processes, + * and returns a [[ProcessPipeline]] for you to interact with however you like. + * + * Note that if you provide `ProcessOutput` callbacks to `stdout`/`stderr`, + * the calls to those callbacks take place on newly spawned threads that + * execute in parallel with the main thread. Thus make sure any data + * processing you do in those callbacks is thread safe! + * @param cwd the working directory of the pipeline + * @param env any additional environment variables you wish to set in the pipeline + * @param stdin any data you wish to pass to the pipelines's standard input (to the first process) + * @param stdout How the pipelines's output stream is configured (the last process stdout) + * @param stderr How the process's error stream is configured (set for all processes) + * @param mergeErrIntoOut merges the pipeline's stderr stream into it's stdout. Note that then the + * stderr will be forwarded with stdout to subsequent processes in the pipeline. + * @param propagateEnv disable this to avoid passing in this parent process's + * environment variables to the pipeline + * @param pipefail if true, the pipeline's exitCode will be the exit code of the first + * failing process. If no process fails, the exit code will be 0. + * @param handleBrokenPipe if true, every [[java.io.IOException]] when redirecting output of a process + * will be caught and handled by killing the writing process. This behaviour + * is consistent with handlers of SIGPIPE signals in most programs + * supporting interruptable piping. Disabled by default on Windows. 
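+ *
+ * An illustrative sketch (the executable names are assumptions, not part of
+ * this API):
+ *
+ * {{{
+ * val pipeline = os.proc("ls", "-l").pipeTo(os.proc("grep", ".scala")).spawn()
+ * pipeline.join()
+ * val code = pipeline.exitCode()
+ * }}}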
+ */ + def spawn( + cwd: Path = null, + env: Map[String, String] = null, + stdin: ProcessInput = Pipe, + stdout: ProcessOutput = Pipe, + stderr: ProcessOutput = os.Inherit, + mergeErrIntoOut: Boolean = false, + propagateEnv: Boolean = true, + pipefail: Boolean = true, + handleBrokenPipe: Boolean = !isWindows + ): ProcessPipeline = { + val brokenPipeQueue = new LinkedBlockingQueue[Int]() + val (_, procs) = + commands.zipWithIndex.foldLeft((Option.empty[ProcessInput], Seq.empty[SubProcess])) { + case ((None, _), (proc, _)) => + val spawned = proc.spawn(cwd, env, stdin, Pipe, stderr, mergeErrIntoOut, propagateEnv) + (Some(spawned.stdout), Seq(spawned)) + case ((Some(input), acc), (proc, index)) if index == commands.length - 1 => + val spawned = proc.spawn( + cwd, + env, + wrapWithBrokenPipeHandler(input, index - 1, brokenPipeQueue), + stdout, + stderr, + mergeErrIntoOut, + propagateEnv + ) + (None, acc :+ spawned) + case ((Some(input), acc), (proc, index)) => + val spawned = proc.spawn( + cwd, + env, + wrapWithBrokenPipeHandler(input, index - 1, brokenPipeQueue), + Pipe, + stderr, + mergeErrIntoOut, + propagateEnv + ) + (Some(spawned.stdout), acc :+ spawned) + } + val pipeline = + new ProcessPipeline(procs, pipefail, if (handleBrokenPipe) Some(brokenPipeQueue) else None) + pipeline.brokenPipeHandler.foreach(_.start()) + pipeline + } + + private def wrapWithBrokenPipeHandler( + wrapped: ProcessInput, + index: Int, + queue: LinkedBlockingQueue[Int] + ) = + new ProcessInput { + override def redirectFrom: Redirect = wrapped.redirectFrom + override def processInput(stdin: => InputStream): Option[Runnable] = + wrapped.processInput(stdin).map { runnable => + new Runnable { + def run() = { + try { + runnable.run() + } catch { + case e: IOException => + queue.put(index) + } + } + } + } + } + + /** + * Pipes the output of this pipeline into the input of the [[next]] process. 
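+ *
+ * For example (the commands are illustrative):
+ *
+ * {{{
+ * os.proc("ls", "-l").pipeTo(os.proc("grep", ".scala")).pipeTo(os.proc("wc", "-l"))
+ * }}}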
+ */ + def pipeTo(next: proc) = ProcGroup(commands :+ next) +} + +@experimental +object ProcessOps { + val spawnHook = new scala.util.DynamicVariable[os.Path => Unit]({ p => () }) + def buildProcess( + command: Seq[String], + cwd: Path = null, + env: Map[String, String] = null, + stdin: ProcessInput = Pipe, + stdout: ProcessOutput = Pipe, + stderr: ProcessOutput = os.Inherit, + mergeErrIntoOut: Boolean = false, + propagateEnv: Boolean = true + ): ProcessBuilder = { + val builder = new java.lang.ProcessBuilder() + + val environment = builder.environment() + + def addToProcessEnv(env: Map[String, String]) = + if (env != null) { + for ((k, v) <- env) { + if (v != null) environment.put(k, v) + else environment.remove(k) + } + } + + os.SubProcess.env.value match { + case null => + if (!propagateEnv) { + environment.clear() + } + case subProcessEnvValue => + environment.clear() + if (propagateEnv) { + addToProcessEnv(subProcessEnvValue) + } + } + + addToProcessEnv(env) + + val dir = Option(cwd).getOrElse(os.pwd) + builder.directory(dir.toIO) + spawnHook.value.apply(dir) + + builder + .command(command: _*) + .redirectInput(stdin.redirectFrom) + .redirectOutput(stdout.redirectTo) + .redirectError(stderr.redirectTo) + .redirectErrorStream(mergeErrIntoOut) + } +} diff --git a/os/src/ReadWriteOps.scala b/os/src/ReadWriteOps.scala index f1f999fb..62b8c0be 100644 --- a/os/src/ReadWriteOps.scala +++ b/os/src/ReadWriteOps.scala @@ -27,6 +27,7 @@ object write { createFolders: Boolean = false, openOptions: Seq[OpenOption] = Seq(CREATE, WRITE) ) = { + checker.value.onWrite(target) if (createFolders) makeDir.all(target / RelPath.up, perms) if (perms != null && !exists(target)) { val permArray = @@ -34,6 +35,7 @@ object write { else Array(PosixFilePermissions.asFileAttribute(perms.toSet())) java.nio.file.Files.createFile(target.toNIO, permArray: _*) } + java.nio.file.Files.newOutputStream( target.toNIO, openOptions.toArray: _* @@ -52,6 +54,7 @@ object write { perms: PermSet, offset: Long ) = { + checker.value.onWrite(target) import collection.JavaConverters._ val permArray: Array[FileAttribute[_]] = @@ -165,6 +168,7 @@ object write { */ object channel extends Function1[Path, SeekableByteChannel] { def write(p: Path, options: Seq[StandardOpenOption]) = { + checker.value.onWrite(p) java.nio.file.Files.newByteChannel(p.toNIO, options.toArray: _*) } def apply(p: Path): SeekableByteChannel = { @@ -211,6 +215,7 @@ object write { */ object truncate { def apply(p: Path, size: Long): Unit = { + checker.value.onWrite(p) val channel = FileChannel.open(p.toNIO, StandardOpenOption.WRITE) try channel.truncate(size) finally channel.close() @@ -241,15 +246,21 @@ object read extends Function1[ReadablePath, String] { * Opens a [[java.io.InputStream]] to read from the given file */ object inputStream extends Function1[ReadablePath, java.io.InputStream] { - def apply(p: ReadablePath): java.io.InputStream = p.getInputStream + def apply(p: ReadablePath): java.io.InputStream = { + checker.value.onRead(p) + p.getInputStream + } } object stream extends Function1[ReadablePath, geny.Readable] { - def apply(p: ReadablePath): geny.Readable = new geny.Readable { - def readBytesThrough[T](f: java.io.InputStream => T): T = { - val is = p.getInputStream - try f(is) - finally is.close() + def apply(p: ReadablePath): geny.Readable = { + new geny.Readable { + override def contentLength: Option[Long] = p.toSource.contentLength + def readBytesThrough[T](f: java.io.InputStream => T): T = { + val is = os.read.inputStream(p) + try f(is) + finally 
is.close() + } } } } @@ -258,7 +269,10 @@ object read extends Function1[ReadablePath, String] { * Opens a [[SeekableByteChannel]] to read from the given file. */ object channel extends Function1[Path, SeekableByteChannel] { - def apply(p: Path): SeekableByteChannel = p.toSource.getChannel() + def apply(p: Path): SeekableByteChannel = { + checker.value.onRead(p) + p.toSource.getChannel() + } } /** @@ -269,7 +283,7 @@ object read extends Function1[ReadablePath, String] { object bytes extends Function1[ReadablePath, Array[Byte]] { def apply(arg: ReadablePath): Array[Byte] = { val out = new java.io.ByteArrayOutputStream() - val stream = arg.getInputStream + val stream = os.read.inputStream(arg) try Internals.transfer(stream, out) finally stream.close() out.toByteArray @@ -277,7 +291,7 @@ object read extends Function1[ReadablePath, String] { def apply(arg: Path, offset: Long, count: Int): Array[Byte] = { val arr = new Array[Byte](count) val buf = ByteBuffer.wrap(arr) - val channel = arg.toSource.getChannel() + val channel = os.read.channel(arg) try { channel.position(offset) val finalCount = channel.read(buf) @@ -358,7 +372,7 @@ object read extends Function1[ReadablePath, String] { def apply(arg: ReadablePath, charSet: Codec) = { new geny.Generator[String] { def generate(handleItem: String => Generator.Action) = { - val is = arg.getInputStream + val is = os.read.inputStream(arg) val isr = new InputStreamReader(is, charSet.decoder) val buf = new BufferedReader(isr) var currentAction: Generator.Action = Generator.Continue diff --git a/os/src/Source.scala b/os/src/Source.scala index 002dca09..75c291a8 100644 --- a/os/src/Source.scala +++ b/os/src/Source.scala @@ -66,6 +66,8 @@ object Source extends WritableLowPri { implicit class WritableSource[T](s: T)(implicit f: T => geny.Writable) extends Source { val writable = f(s) + + override def contentLength: Option[Long] = writable.contentLength def getHandle() = Left(writable) } } @@ -115,4 +117,9 @@ object SeekableSource { implicit class ChannelSource(cn: SeekableByteChannel) extends SeekableSource { def getHandle() = Right(cn) } + class ChannelLengthSource(cn: SeekableByteChannel, length: Long) extends SeekableSource { + def getHandle() = Right(cn) + + override def contentLength: Option[Long] = Some(length) + } } diff --git a/os/src/StatOps.scala b/os/src/StatOps.scala index 958138a3..cf8695ef 100644 --- a/os/src/StatOps.scala +++ b/os/src/StatOps.scala @@ -74,6 +74,7 @@ object mtime extends Function1[Path, Long] { */ object set { def apply(p: Path, millis: Long) = { + checker.value.onWrite(p) Files.setLastModifiedTime(p.wrapped, FileTime.fromMillis(millis)) } } diff --git a/os/src/SubProcess.scala b/os/src/SubProcess.scala new file mode 100644 index 00000000..114e9731 --- /dev/null +++ b/os/src/SubProcess.scala @@ -0,0 +1,588 @@ +package os + +import java.io._ +import java.util.concurrent.TimeUnit + +import scala.language.implicitConversions +import java.util.concurrent.atomic.AtomicBoolean +import java.util.concurrent.LinkedTransferQueue +import java.util.concurrent.LinkedBlockingQueue +import scala.annotation.tailrec + +/** + * Parent type for single processes and process pipelines. + */ +sealed trait ProcessLike extends java.lang.AutoCloseable { + + /** + * The exit code of this [[ProcessLike]]. Conventionally, 0 exit code represents a + * successful termination, and non-zero exit code indicates a failure. 
+ * + * Throws an exception if the subprocess has not terminated + */ + def exitCode(): Int + + /** + * Returns `true` if the [[ProcessLike]] is still running and has not terminated + */ + def isAlive(): Boolean + + /** + * Attempt to destroy the [[ProcessLike]] (gently), via the underlying JVM APIs + */ + def destroy(): Unit + + /** + * Force-destroys the [[ProcessLike]], via the underlying JVM APIs + */ + def destroyForcibly(): Unit + + /** + * Alias for [[destroy]], implemented for [[java.lang.AutoCloseable]] + */ + override def close(): Unit + + /** + * Wait up to `millis` for the [[ProcessLike]] to terminate, by default waits + * indefinitely. Returns `true` if the [[ProcessLike]] has terminated by the time + * this method returns. + */ + def waitFor(timeout: Long = -1): Boolean + + /** + * Wait up to `millis` for the [[ProcessLike]] to terminate and all stdout and stderr + * from the subprocess to be handled. By default waits indefinitely; if a time + * limit is given, explicitly destroys the [[ProcessLike]] if it has not completed by + * the time the timeout has occurred. + * + * By default, a process is destroyed by sending a `SIGTERM` signal, which allows an opportunity + * for it to clean up any resources it was using. If the process is unresponsive to this, a + * `SIGKILL` signal is sent `timeoutGracePeriod` milliseconds later. If `timeoutGracePeriod` is + * `0`, then there is no `SIGTERM`; if it is `-1`, there is no `SIGKILL` sent. + * + * @returns `true` when the process did not require explicit termination by either `SIGTERM` or `SIGKILL` and `false` otherwise. + * @note the issuing of `SIGTERM` instead of `SIGKILL` is implementation dependent on your JVM version. Pre-Java 9, no `SIGTERM` may be + * issued. Check the documentation for your JDK's `Process.destroy`. + */ + def join(timeout: Long = -1, timeoutGracePeriod: Long = 100): Boolean = { + val exitedCleanly = waitFor(timeout) + if (!exitedCleanly) { + assume( + timeout != -1, + "if the waitFor does not complete cleanly, this implies there is a timeout imposed, so the grace period is applicable" + ) + if (timeoutGracePeriod == -1) destroy() + else if (timeoutGracePeriod == 0) destroyForcibly() + else { + destroy() + if (!waitFor(timeoutGracePeriod)) { + destroyForcibly() + } + } + waitFor(-1) + } + joinPumperThreadsHook() + exitedCleanly + } + + @deprecatedOverriding("this method is now a forwarder, and should not be overriden", "0.10.4") + private[os] def join(timeout: Long): Boolean = join(timeout, timeoutGracePeriod = 100) + + /** + * A hook method used by `join` to close the input and output streams associated with the process, not for public consumption. + */ + private[os] def joinPumperThreadsHook(): Unit +} + +/** + * Represents a spawn subprocess that has started and may or may not have + * completed. 
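+ *
+ * A lifecycle sketch (the `sleep` command is an assumption, not part of this
+ * API):
+ *
+ * {{{
+ * val sub = os.proc("sleep", "10").spawn()
+ * assert(sub.isAlive())
+ * sub.destroy() // ask it to terminate gracefully, falling back to a forcible kill
+ * sub.join()    // wait for the process and its stream-pumper threads to finish
+ * }}}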
+ */ +@deprecatedInheritance( + "this class will be made final: if you are using it be aware that `join` has a new overloading", + "0.10.4" +) +class SubProcess( + val wrapped: java.lang.Process, + val inputPumperThread: Option[Thread], + val outputPumperThread: Option[Thread], + val errorPumperThread: Option[Thread], + val shutdownGracePeriod: Long, + val shutdownHookMonitorThread: Option[Thread] +) extends ProcessLike { + def this( + wrapped: java.lang.Process, + inputPumperThread: Option[Thread], + outputPumperThread: Option[Thread], + errorPumperThread: Option[Thread] + ) = this( + wrapped, + inputPumperThread, + outputPumperThread, + errorPumperThread, + 100, + None + ) + val stdin: SubProcess.InputStream = new SubProcess.InputStream(wrapped.getOutputStream) + val stdout: SubProcess.OutputStream = new SubProcess.OutputStream(wrapped.getInputStream) + val stderr: SubProcess.OutputStream = new SubProcess.OutputStream(wrapped.getErrorStream) + + /** + * The subprocess' exit code. Conventionally, 0 exit code represents a + * successful termination, and non-zero exit code indicates a failure. + * + * Throws an exception if the subprocess has not terminated + */ + def exitCode(): Int = wrapped.exitValue() + + /** + * Returns `true` if the subprocess is still running and has not terminated + */ + def isAlive(): Boolean = wrapped.isAlive + + /** + * Attempt to destroy the subprocess (gently), via the underlying JVM APIs + */ + def destroy(): Unit = destroy(shutdownGracePeriod = this.shutdownGracePeriod, async = false) + + def destroy( + shutdownGracePeriod: Long, + async: Boolean + ): Unit = destroy(shutdownGracePeriod, async, recursive = true) + + /** + * Destroys the subprocess, via the underlying JVM APIs, with configurable levels of + * aggressiveness: + * + * @param async set this to `true` if you do not want to wait on the subprocess exiting + * @param shutdownGracePeriod use this to override the default wait time for the subprocess + * to gracefully exit before destroying it forcibly. Defaults to the `shutdownGracePeriod` + * that was used to spawned the process, but can be set to 0 + * (i.e. force exit immediately) or -1 (i.e. never force exit) + * or anything in between. Typically defaults to 100 milliseconds. + * @param recursive whether or not to also destroy this process's own child processes and + * descendents. Each parent process is destroyed before its children, to + * ensure that when we are destroying the child processes no other children + * can be spawned concurrently + */ + def destroy( + shutdownGracePeriod: Long = this.shutdownGracePeriod, + async: Boolean = false, + recursive: Boolean = true + ): Unit = { + + def destroy0(p: ProcessHandle) = { + p.destroy() + if (!async) { + val now = System.currentTimeMillis() + + while ( + p.isAlive && (shutdownGracePeriod == -1 || System.currentTimeMillis() - now < shutdownGracePeriod) + ) { + Thread.sleep(1) + } + + if (p.isAlive) p.destroyForcibly() + } + } + def rec(p: ProcessHandle): Unit = { + destroy0(p) + p.children().forEach(c => rec(c)) + } + if (recursive) rec(wrapped.toHandle) + else destroy0(wrapped.toHandle) + } + + @deprecated("Use destroy(shutdownGracePeriod = 0)") + def destroyForcibly(): Unit = destroy(shutdownGracePeriod = 0) + + /** + * Alias for [[destroy]] + */ + def close() = wrapped.destroy() + + /** + * Wait up to `millis` for the subprocess to terminate, by default waits + * indefinitely. Returns `true` if the subprocess has terminated by the time + * this method returns. 
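+ *
+ * For example (the command is illustrative):
+ *
+ * {{{
+ * val sub = os.proc("sleep", "10").spawn()
+ * if (!sub.waitFor(1000)) sub.destroy() // give up waiting after one second
+ * }}}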
+ */ + def waitFor(timeout: Long = -1): Boolean = { + if (timeout == -1) { + wrapped.waitFor() + true + } else { + wrapped.waitFor(timeout, TimeUnit.MILLISECONDS) + } + } + + private[os] def joinPumperThreadsHook(): Unit = { + outputPumperThread.foreach(_.join()) + errorPumperThread.foreach(_.join()) + } +} + +object SubProcess { + + /** + * The env passed by default to child processes. + * When `null`, the system environment is used. + */ + val env = new scala.util.DynamicVariable[Map[String, String]](null) + + /** + * A [[BufferedWriter]] with the underlying [[java.io.OutputStream]] exposed + * + * Note that all writes that occur through this class are thread-safe and + * synchronized. If you wish to perform writes without the synchronization + * overhead, you can use the underlying [[wrapped]] stream directly + */ + class InputStream(val wrapped: java.io.OutputStream) + extends java.io.OutputStream with DataOutput { + val data = new DataOutputStream(wrapped) + val buffered = new BufferedWriter(new OutputStreamWriter(wrapped)) + + def write(b: Int) = wrapped.write(b) + override def write(b: Array[Byte]): Unit = wrapped.write(b) + override def write(b: Array[Byte], off: Int, len: Int): Unit = wrapped.write(b, off, len) + + def writeBoolean(v: Boolean) = data.writeBoolean(v) + def writeByte(v: Int) = data.writeByte(v) + def writeShort(v: Int) = data.writeShort(v) + def writeChar(v: Int) = data.writeChar(v) + def writeInt(v: Int) = data.writeInt(v) + def writeLong(v: Long) = data.writeLong(v) + def writeFloat(v: Float) = data.writeFloat(v) + def writeDouble(v: Double) = data.writeDouble(v) + def writeBytes(s: String) = data.writeBytes(s) + def writeChars(s: String) = data.writeChars(s) + def writeUTF(s: String) = data.writeUTF(s) + + def writeLine(s: String) = buffered.write(s + "\n") + def write(s: String) = buffered.write(s) + def write(s: Array[Char]) = buffered.write(s) + + override def flush() = { + data.flush() + buffered.flush() + wrapped.flush() + } + override def close() = wrapped.close() + } + + /** + * A combination [[BufferedReader]] and [[java.io.InputStream]], this allows + * you to read both bytes and lines, without worrying about the buffer used + * for reading lines messing up your reading of bytes. + * + * Note that all reads that occur through this class are thread-safe and + * synchronized. If you wish to perform writes without the synchronization + * overhead, you can use the underlying [[wrapped]] stream directly + */ + class OutputStream(val wrapped: java.io.InputStream) + extends java.io.InputStream with DataInput with geny.ByteData { + val data = new DataInputStream(wrapped) + val buffered = new BufferedReader(new InputStreamReader(wrapped)) + + def read() = wrapped.read() + override def read(b: Array[Byte]) = wrapped.read(b) + override def read(b: Array[Byte], off: Int, len: Int) = wrapped.read(b, off, len) + + def readFully(b: Array[Byte]) = data.readFully(b) + def readFully(b: Array[Byte], off: Int, len: Int) = data.readFully(b, off, len) + + def skipBytes(n: Int) = ??? 
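+    // `skipBytes` is left unimplemented (`???`) and throws scala.NotImplementedError
+    // if called; read and discard bytes via `read` if skipping is needed.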
+ def readBoolean() = data.readBoolean() + def readByte() = data.readByte() + def readUnsignedByte() = data.readUnsignedByte() + def readShort() = data.readShort() + def readUnsignedShort() = data.readUnsignedShort() + def readChar() = data.readChar() + def readInt() = data.readInt() + def readLong() = data.readLong() + def readFloat() = data.readFloat() + def readDouble() = data.readDouble() + // def readLine() = data.readLine() + def readUTF() = data.readUTF() + + def readLine() = buffered.readLine() + + def bytes: Array[Byte] = synchronized { + val out = new ByteArrayOutputStream() + Internals.transfer(wrapped, out) + out.toByteArray + } + + override def close() = wrapped.close() + } +} + +@deprecatedInheritance( + "this class will be made final: if you are using it be aware that `join` has a new overloading", + "0.10.4" +) +class ProcessPipeline( + val processes: Seq[SubProcess], + pipefail: Boolean, + brokenPipeQueue: Option[LinkedBlockingQueue[Int]] // to emulate pipeline behavior in jvm < 9 +) extends ProcessLike { + pipeline => + + /** + * String representation of the pipeline. + */ + def commandString = processes.map(_.wrapped.toString).mkString(" | ") + + private[os] val brokenPipeHandler: Option[Thread] = brokenPipeQueue.map { queue => + new Thread( + new Runnable { + override def run(): Unit = { + var pipelineRunning = true + while (pipelineRunning) { + val brokenPipeIndex = queue.take() + if (brokenPipeIndex == processes.length) { // Special case signaling finished pipeline + pipelineRunning = false + } else { + processes(brokenPipeIndex).destroyForcibly() + } + } + new Thread( + new Runnable { + override def run(): Unit = { + while (!pipeline.waitFor()) {} // handle spurious wakes + queue.put(processes.length) // Signal finished pipeline + } + }, + commandString + " pipeline termination handler" + ).start() + } + }, + commandString + " broken pipe handler" + ) + } + + /** + * The exit code of this [[ProcessPipeline]]. Conventionally, 0 exit code represents a + * successful termination, and non-zero exit code indicates a failure. Throws an exception + * if the subprocess has not terminated. + * + * If pipefail is set, the exit code is the first non-zero exit code of the pipeline. If no + * process in the pipeline has a non-zero exit code, the exit code is 0. + */ + override def exitCode(): Int = { + if (pipefail) + processes.map(_.exitCode()) + .filter(_ != 0) + .headOption + .getOrElse(0) + else + processes.last.exitCode() + } + + /** + * Returns `true` if the [[ProcessPipeline]] is still running and has not terminated. + * Any process in the pipeline can be alive for the pipeline to be alive. + */ + override def isAlive(): Boolean = { + processes.exists(_.isAlive()) + } + + /** + * Attempt to destroy the [[ProcessPipeline]] (gently), via the underlying JVM APIs. + * All processes in the pipeline are destroyed. + */ + override def destroy(): Unit = { + processes.foreach(_.destroy()) + } + + /** + * Force-destroys the [[ProcessPipeline]], via the underlying JVM APIs. + * All processes in the pipeline are force-destroyed. + */ + override def destroyForcibly(): Unit = { + processes.foreach(_.destroyForcibly()) + } + + /** + * Alias for [[destroy]], implemented for [[java.lang.AutoCloseable]]. + */ + override def close(): Unit = { + processes.foreach(_.close()) + } + + /** + * Wait up to `timeout` for the [[ProcessPipeline]] to terminate, by default waits + * indefinitely. Returns `true` if the [[ProcessPipeline]] has terminated by the time + * this method returns. 
+ * + * Waits for each process one by one, while aggregating the total time waited. If + * `timeout` has passed before all processes have terminated, returns `false`. + */ + override def waitFor(timeout: Long = -1): Boolean = { + @tailrec + def waitForRec(startedAt: Long, processesLeft: Seq[SubProcess]): Boolean = processesLeft match { + case Nil => true + case head :: tail => + val elapsed = System.currentTimeMillis() - startedAt + val timeoutLeft = timeout - elapsed + if (timeoutLeft < 0) false + else if (head.waitFor(timeoutLeft)) waitForRec(startedAt, tail) + else false + } + + if (timeout == -1) { + processes.forall(_.waitFor()) + } else { + val timeNow = System.currentTimeMillis() + waitForRec(timeNow, processes) + } + } + + /** + * Wait up to `timeout` for the [[ProcessPipeline]] to terminate all the processes + * in pipeline. By default waits indefinitely; if a time limit is given, explicitly + * destroys each process if it has not completed by the time the timeout has occurred. + * + * By default, the processes are destroyed by sending `SIGTERM` signals, which allows an opportunity + * for them to clean up any resources it. If any process is unresponsive to this, a + * `SIGKILL` signal is sent `timeoutGracePeriod` milliseconds later. If `timeoutGracePeriod` is + * `0`, then there is no `SIGTERM`; if it is `-1`, there is no `SIGKILL` sent. + * + * @returns `true` when the processes did not require explicit termination by either `SIGTERM` or `SIGKILL` and `false` otherwise. + * @note the issuing of `SIGTERM` instead of `SIGKILL` is implementation dependent on your JVM version. Pre-Java 9, no `SIGTERM` may be + * issued. Check the documentation for your JDK's `Process.destroy`. + */ + override def join(timeout: Long = -1, timeoutGracePeriod: Long = 100): Boolean = { + // in this case, the grace period does not apply, so fine + if (timeout == -1) { + processes.forall(_.join()) + } else super.join(timeout, timeoutGracePeriod) + } + + private[os] def joinPumperThreadsHook(): Unit = { + processes.foreach(_.joinPumperThreadsHook()) + } +} + +/** + * Represents the configuration of a SubProcess's input stream. Can either be + * [[os.Inherit]], [[os.Pipe]], [[os.Path]] or a [[os.Source]] + */ +trait ProcessInput { + def redirectFrom: ProcessBuilder.Redirect + def processInput(stdin: => SubProcess.InputStream): Option[Runnable] +} +object ProcessInput { + implicit def makeSourceInput[T](r: T)(implicit f: T => Source): ProcessInput = SourceInput(f(r)) + implicit def makePathRedirect(p: Path): ProcessInput = PathRedirect(p) + case class SourceInput(r: Source) extends ProcessInput { + def redirectFrom = ProcessBuilder.Redirect.PIPE + + def processInput(stdin: => SubProcess.InputStream): Option[Runnable] = Some { + new Runnable { + def run() = { + r.writeBytesTo(stdin) + stdin.close() + } + } + } + } +} + +/** + * Represents the configuration of a SubProcess's output or error stream. 
Can + * either be [[os.Inherit]], [[os.Pipe]], [[os.Path]] or a [[os.ProcessOutput]] + */ +trait ProcessOutput { + def redirectTo: ProcessBuilder.Redirect + def processOutput(out: => SubProcess.OutputStream): Option[Runnable] +} +object ProcessOutput { + implicit def makePathRedirect(p: Path): ProcessOutput = PathRedirect(p) + + def apply(f: (Array[Byte], Int) => Unit) = ReadBytes(f) + + case class ReadBytes(f: (Array[Byte], Int) => Unit) + extends ProcessOutput { + def redirectTo = ProcessBuilder.Redirect.PIPE + def processOutput(out: => SubProcess.OutputStream) = Some { + new Runnable { def run(): Unit = os.Internals.transfer0(out, f) } + } + } + + case class Readlines(f: String => Unit) + extends ProcessOutput { + def redirectTo = ProcessBuilder.Redirect.PIPE + def processOutput(out: => SubProcess.OutputStream) = Some { + new Runnable { + def run(): Unit = { + val buffered = new BufferedReader(new InputStreamReader(out)) + while ({ + val lineOpt = + try { + buffered.readLine() match { + case null => None + case line => Some(line) + } + } catch { case e: Throwable => None } + lineOpt match { + case None => false + case Some(s) => + f(s) + true + } + }) () + } + } + } + } +} + +/** + * Inherit the input/output stream from the current process. + * + * Can be overriden on a thread local basis for the various + * kinds of streams (stdin, stdout, stderr) via [[in]], [[out]], and [[err]] + */ +object Inherit extends ProcessInput with ProcessOutput { + def redirectTo = ProcessBuilder.Redirect.INHERIT + def redirectFrom = ProcessBuilder.Redirect.INHERIT + def processInput(stdin: => SubProcess.InputStream) = None + def processOutput(stdin: => SubProcess.OutputStream) = None + + val in = new scala.util.DynamicVariable[ProcessInput](Inherit) + val out = new scala.util.DynamicVariable[ProcessOutput](Inherit) + val err = new scala.util.DynamicVariable[ProcessOutput](Inherit) +} + +/** + * Inherit the input/output stream from the current process. 
+ * Identical of [[os.Inherit]], except it cannot be redirected globally + */ +object InheritRaw extends ProcessInput with ProcessOutput { + def redirectTo = ProcessBuilder.Redirect.INHERIT + def redirectFrom = ProcessBuilder.Redirect.INHERIT + def processInput(stdin: => SubProcess.InputStream) = None + def processOutput(stdin: => SubProcess.OutputStream) = None +} + +/** + * Pipe the input/output stream to the current process to be used via + * `java.lang.Process#{getInputStream,getOutputStream,getErrorStream}` + */ +object Pipe extends ProcessInput with ProcessOutput { + def redirectTo = ProcessBuilder.Redirect.PIPE + def redirectFrom = ProcessBuilder.Redirect.PIPE + def processInput(stdin: => SubProcess.InputStream) = None + def processOutput(stdin: => SubProcess.OutputStream) = None +} + +case class PathRedirect(p: Path) extends ProcessInput with ProcessOutput { + def redirectFrom = ProcessBuilder.Redirect.from(p.toIO) + def processInput(stdin: => SubProcess.InputStream) = None + def redirectTo = ProcessBuilder.Redirect.to(p.toIO) + def processOutput(out: => SubProcess.OutputStream) = None +} +case class PathAppendRedirect(p: Path) extends ProcessOutput { + def redirectTo = ProcessBuilder.Redirect.appendTo(p.toIO) + def processOutput(out: => SubProcess.OutputStream) = None +} diff --git a/os/src/TempOps.scala b/os/src/TempOps.scala index 900fe6b7..f31f16d7 100644 --- a/os/src/TempOps.scala +++ b/os/src/TempOps.scala @@ -28,14 +28,15 @@ object temp { deleteOnExit: Boolean = true, perms: PermSet = null ): Path = { - import collection.JavaConverters._ val permArray: Array[FileAttribute[_]] = if (perms == null) Array.empty else Array(PosixFilePermissions.asFileAttribute(perms.toSet())) val nioPath = dir match { case null => java.nio.file.Files.createTempFile(prefix, suffix, permArray: _*) - case _ => java.nio.file.Files.createTempFile(dir.wrapped, prefix, suffix, permArray: _*) + case _ => + checker.value.onWrite(dir) + java.nio.file.Files.createTempFile(dir.wrapped, prefix, suffix, permArray: _*) } if (contents != null) write.over(Path(nioPath), contents) @@ -63,7 +64,9 @@ object temp { val nioPath = dir match { case null => java.nio.file.Files.createTempDirectory(prefix, permArray: _*) - case _ => java.nio.file.Files.createTempDirectory(dir.wrapped, prefix, permArray: _*) + case _ => + checker.value.onWrite(dir) + java.nio.file.Files.createTempDirectory(dir.wrapped, prefix, permArray: _*) } if (deleteOnExit) nioPath.toFile.deleteOnExit() diff --git a/os/src/ZipOps.scala b/os/src/ZipOps.scala new file mode 100644 index 00000000..317b27a8 --- /dev/null +++ b/os/src/ZipOps.scala @@ -0,0 +1,320 @@ +package os + +import java.net.URI +import java.nio.file.{FileSystem, FileSystems, Files} +import java.nio.file.attribute.{BasicFileAttributeView, FileTime, PosixFilePermissions} +import java.util.zip.{ZipEntry, ZipFile, ZipInputStream, ZipOutputStream} +import scala.collection.JavaConverters._ +import scala.util.matching.Regex + +object zip { + + /** + * Opens a zip file as a filesystem root that you can operate on using `os.*` APIs. Note + * that you need to call `close()` on the returned `ZipRoot` when you are done with it, to + * avoid leaking filesystem resources + */ + def open(path: Path): ZipRoot = { + new ZipRoot(FileSystems.newFileSystem( + new URI("jar", path.wrapped.toUri.toString, null), + Map("create" -> "true").asJava + )) + } + + /** + * Zips the provided list of files and directories into a single ZIP archive. 
+ * + * If `dest` already exists and is a zip, performs modifications to `dest` in place + * rather than creating a new zip. + * + * @param dest The path to the destination ZIP file. + * @param sources A list of paths to files and directories to be zipped. Defaults to an empty list. + * @param excludePatterns A list of regular expression patterns to exclude files from the ZIP archive. Defaults to an empty list. + * @param includePatterns A list of regular expression patterns to include files in the ZIP archive. Defaults to an empty list (includes all files). + * @param preserveMtimes Whether to preserve modification times (mtimes) of the files. + * @param deletePatterns A list of regular expression patterns to delete files from an existing ZIP archive before appending new ones. + * @param compressionLevel number from 0-9, where 0 is no compression and 9 is best compression. Defaults to -1 (default compression) + * @return The path to the created ZIP archive. + */ + def apply( + dest: os.Path, + sources: Seq[ZipSource] = List(), + excludePatterns: Seq[Regex] = List(), + includePatterns: Seq[Regex] = List(), + preserveMtimes: Boolean = false, + deletePatterns: Seq[Regex] = List(), + compressionLevel: Int = java.util.zip.Deflater.DEFAULT_COMPRESSION + ): os.Path = { + checker.value.onWrite(dest) + // check read preemptively in case "dest" is created + for (source <- sources) checker.value.onRead(source.src) + + if (os.exists(dest)) { + val opened = open(dest) + try { + for { + openedPath <- os.walk(opened) + if anyPatternsMatch(openedPath.relativeTo(opened).toString, deletePatterns) + } os.remove.all(openedPath) + + createNewZip0( + sources, + excludePatterns, + includePatterns, + (path, sub) => { + os.copy(path, opened / sub, createFolders = true) + if (!preserveMtimes) { + os.mtime.set(opened / sub, 0) + // This is the only way we can properly zero out filesystem metadata within the + // Zip file filesystem; `os.mtime.set` is not enough + val view = + Files.getFileAttributeView((opened / sub).toNIO, classOf[BasicFileAttributeView]) + view.setTimes(FileTime.fromMillis(0), FileTime.fromMillis(0), FileTime.fromMillis(0)) + } + } + ) + } finally opened.close() + } else { + val f = Files.newOutputStream(dest.toNIO) + try createNewZip( + sources, + excludePatterns, + includePatterns, + preserveMtimes, + compressionLevel, + f + ) + finally f.close() + } + dest + } + + private def createNewZip0( + sources: Seq[ZipSource], + excludePatterns: Seq[Regex], + includePatterns: Seq[Regex], + makeZipEntry0: (os.Path, os.SubPath) => Unit + ): Unit = { + sources.foreach { source => + if (os.isDir(source.src)) { + for (path <- os.walk(source.src)) { + if (os.isFile(path) && shouldInclude(path.toString, excludePatterns, includePatterns)) { + makeZipEntry0(path, source.dest.getOrElse(os.sub) / path.subRelativeTo(source.src)) + } + } + } else if (shouldInclude(source.src.last, excludePatterns, includePatterns)) { + makeZipEntry0(source.src, source.dest.getOrElse(os.sub / source.src.last)) + } + } + } + private def createNewZip( + sources: Seq[ZipSource], + excludePatterns: Seq[Regex], + includePatterns: Seq[Regex], + preserveMtimes: Boolean, + compressionLevel: Int, + out: java.io.OutputStream + ): Unit = { + val zipOut = new ZipOutputStream(out) + zipOut.setLevel(compressionLevel) + + try { + createNewZip0( + sources, + excludePatterns, + includePatterns, + (path, sub) => makeZipEntry(path, sub, preserveMtimes, zipOut) + ) + } finally { + zipOut.close() + } + } + + private[os] def anyPatternsMatch(fileName: 
String, patterns: Seq[Regex]) = { + patterns.exists(_.findFirstIn(fileName).isDefined) + } + private[os] def shouldInclude( + fileName: String, + excludePatterns: Seq[Regex], + includePatterns: Seq[Regex] + ): Boolean = { + val isExcluded = anyPatternsMatch(fileName, excludePatterns) + val isIncluded = includePatterns.isEmpty || anyPatternsMatch(fileName, includePatterns) + !isExcluded && isIncluded + } + + private def makeZipEntry( + file: os.Path, + sub: os.SubPath, + preserveMtimes: Boolean, + zipOut: ZipOutputStream + ) = { + + val mtimeOpt = if (preserveMtimes) Some(os.mtime(file)) else None + + val fis = if (os.isFile(file)) Some(os.read.inputStream(file)) else None + try makeZipEntry0(sub, fis, mtimeOpt, zipOut) + finally fis.foreach(_.close()) + } + + private def makeZipEntry0( + sub: os.SubPath, + is: Option[java.io.InputStream], + preserveMtimes: Option[Long], + zipOut: ZipOutputStream + ) = { + val zipEntry = new ZipEntry(sub.toString) + + preserveMtimes match { + case Some(mtime) => zipEntry.setTime(mtime) + case None => zipEntry.setTime(0) + } + + zipOut.putNextEntry(zipEntry) + is.foreach(os.Internals.transfer(_, zipOut, close = false)) + } + + /** + * Zips a folder recursively and returns a geny.Writable for streaming the ZIP data. + * + * @param source The path to the folder to be zipped. + * @param destination The path to the destination ZIP file (optional). If not provided, a temporary ZIP file will be created. + * @param appendToExisting Whether to append the listed paths to an existing ZIP file (if it exists). Defaults to false. + * @param excludePatterns A list of regular expression patterns to exclude files during zipping. Defaults to an empty list. + * @param includePatterns A list of regular expression patterns to include files in the ZIP archive. Defaults to an empty list (includes all files). + * @param preserveMtimes Whether to preserve modification times (mtimes) of the files. + * @return A geny.Writable object for writing the ZIP data. + */ + def stream( + sources: Seq[ZipSource], + excludePatterns: Seq[Regex] = List(), + includePatterns: Seq[Regex] = List(), + preserveMtimes: Boolean = false, + compressionLevel: Int = java.util.zip.Deflater.DEFAULT_COMPRESSION + ): geny.Writable = { + (outputStream: java.io.OutputStream) => + { + createNewZip( + sources, + excludePatterns, + includePatterns, + preserveMtimes, + compressionLevel, + outputStream + ) + } + } + + /** + * A filesystem root representing a zip file + */ + class ZipRoot private[os] (fs: FileSystem) extends Path(fs.getRootDirectories.iterator().next()) + with AutoCloseable { + def close(): Unit = fs.close() + } + + /** + * A file or folder you want to include in a zip file. 
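+ *
+ * A sketch of passing sources to [[os.zip]] via the implicit conversions below
+ * (the destination and source paths are hypothetical):
+ *
+ * {{{
+ * os.zip(
+ *   dest = os.pwd / "out.zip",
+ *   sources = Seq[os.zip.ZipSource](
+ *     os.pwd / "folder1",                                // the contents of a folder, recursively
+ *     (os.pwd / "notes.txt", os.sub / "docs/notes.txt")  // a file, stored under a new name
+ *   )
+ * )
+ * }}}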
+ */ + class ZipSource private[os] (val src: os.Path, val dest: Option[os.SubPath]) + object ZipSource { + implicit def fromPath(src: os.Path): ZipSource = new ZipSource(src, None) + implicit def fromSeqPath(srcs: Seq[os.Path]): Seq[ZipSource] = srcs.map(fromPath) + implicit def fromPathTuple(tuple: (os.Path, os.SubPath)): ZipSource = + new ZipSource(tuple._1, Some(tuple._2)) + } +} + +object unzip { + + /** + * Lists the contents of the given zip file without extracting it + */ + def list( + source: os.Path, + excludePatterns: Seq[Regex] = List(), + includePatterns: Seq[Regex] = List() + ): Generator[os.SubPath] = { + for { + (zipEntry, zipInputStream) <- + streamRaw(os.read.stream(source), excludePatterns, includePatterns) + } yield os.SubPath(zipEntry.getName) + } + + /** + * Extract the given zip file into the destination directory + * + * @param source An `os.Path` containing a zip file + * @param dest The path to the destination directory for extracted files. + * @param excludePatterns A list of regular expression patterns to exclude files during extraction. (Optional) + */ + def apply( + source: os.Path, + dest: os.Path, + excludePatterns: Seq[Regex] = List(), + includePatterns: Seq[Regex] = List() + ): os.Path = { + stream(os.read.stream(source), dest, excludePatterns, includePatterns) + dest + } + + /** + * Unzips a ZIP data stream represented by a geny.Readable and extracts it to a destination directory. + * + * @param source A geny.Readable object representing the ZIP data stream. + * @param dest The path to the destination directory for extracted files. + * @param excludePatterns A list of regular expression patterns to exclude files during extraction. (Optional) + */ + def stream( + source: geny.Readable, + dest: os.Path, + excludePatterns: Seq[Regex] = List(), + includePatterns: Seq[Regex] = List() + ): Unit = { + checker.value.onWrite(dest) + for ((zipEntry, zipInputStream) <- streamRaw(source, excludePatterns, includePatterns)) { + val newFile = dest / os.SubPath(zipEntry.getName) + if (zipEntry.isDirectory) os.makeDir.all(newFile) + else { + val outputStream = os.write.outputStream(newFile, createFolders = true) + os.Internals.transfer(zipInputStream, outputStream, close = false) + outputStream.close() + } + } + } + + /** + * Low-level api that streams the contents of the given zip file: takes a `geny.Reaable` + * providing the bytes of the zip file, and returns a `geny.Generator` containing `ZipEntry`s + * and the underlying `ZipInputStream` representing the entries in the zip file. 
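+ *
+ * A minimal sketch of consuming the stream (the zip path is hypothetical):
+ *
+ * {{{
+ * os.unzip.streamRaw(os.read.stream(os.pwd / "out.zip")).foreach {
+ *   case (entry, zipInputStream) => println(entry.getName)
+ * }
+ * }}}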
+ */ + def streamRaw( + source: geny.Readable, + excludePatterns: Seq[Regex] = List(), + includePatterns: Seq[Regex] = List() + ): geny.Generator[(ZipEntry, java.io.InputStream)] = { + new Generator[(ZipEntry, java.io.InputStream)] { + override def generate(handleItem: ((ZipEntry, java.io.InputStream)) => Generator.Action) + : Generator.Action = { + var lastAction: Generator.Action = Generator.Continue + source.readBytesThrough { inputStream => + val zipInputStream = new ZipInputStream(inputStream) + try { + var zipEntry: ZipEntry = zipInputStream.getNextEntry + while (lastAction == Generator.Continue && zipEntry != null) { + // Skip files that match the exclusion patterns + if (os.zip.shouldInclude(zipEntry.getName, excludePatterns, includePatterns)) { + lastAction = handleItem((zipEntry, zipInputStream)) + } + zipEntry = zipInputStream.getNextEntry + } + } finally { + zipInputStream.closeEntry() + zipInputStream.close() + } + } + lastAction + } + } + } +} diff --git a/os/src/experimental.scala b/os/src/experimental.scala new file mode 100644 index 00000000..3294cfd2 --- /dev/null +++ b/os/src/experimental.scala @@ -0,0 +1,8 @@ +package os + +import scala.annotation.StaticAnnotation + +/** + * Annotation to mark experimental API, which is not guaranteed to stay. + */ +class experimental extends StaticAnnotation {} diff --git a/os/src/package.scala b/os/src/package.scala new file mode 100644 index 00000000..a854b906 --- /dev/null +++ b/os/src/package.scala @@ -0,0 +1,79 @@ +import scala.language.implicitConversions +import java.nio.file.FileSystem +import java.nio.file.FileSystems +import java.nio.file.Paths +import scala.util.DynamicVariable + +package object os extends ResourceApi { + type Generator[+T] = geny.Generator[T] + val Generator = geny.Generator + implicit def GlobSyntax(s: StringContext): GlobInterpolator = new GlobInterpolator(s) + + /** + * The root of the filesystem + */ + val root: Path = Path(java.nio.file.Paths.get(".").toAbsolutePath.getRoot) + + def root(root: String, fileSystem: FileSystem = FileSystems.getDefault()): Path = { + val path = Path(fileSystem.getPath(root)) + assert(path.root == root || path.root == root.replace('/', '\\'), s"$root is not a root path") + path + } + + // See https://github.com/com-lihaoyi/os-lib/pull/239 + // and https://github.com/lightbend/mima/issues/794 + // why the need the inner object to preserve binary compatibility + private object _home { + lazy val value = Path(System.getProperty("user.home")) + } + + /** + * The user's home directory + */ + def home: Path = _home.value + + /** + * The current working directory for this process. + */ + def pwd: Path = dynamicPwdFunction.value() + + private val pwd0 = os.Path(java.nio.file.Paths.get(".").toAbsolutePath) + + /** + * Used to override `pwd` within a certain scope with a generated value + */ + val dynamicPwdFunction: DynamicVariable[() => Path] = new DynamicVariable(() => dynamicPwd.value) + + /** + * Used to override `pwd` within a certain scope with a fixed value + */ + val dynamicPwd: DynamicVariable[Path] = new DynamicVariable(pwd0) + + val up: RelPath = RelPath.up + + val rel: RelPath = RelPath.rel + + val sub: SubPath = SubPath.sub + + @experimental + val checker: DynamicVariable[Checker] = new DynamicVariable[Checker](Checker.Nop) + + /** + * Extractor to let you easily pattern match on [[os.Path]]s. 
Lets you do + * + * {{{ + * @ val base/segment/filename = pwd + * base: Path = Path(Vector("Users", "haoyi", "Dropbox (Personal)")) + * segment: String = "Workspace" + * filename: String = "Ammonite" + * }}} + * + * To break apart a path and extract various pieces of it. + */ + object / { + def unapply(p: Path): Option[(Path, String)] = { + if (p.segmentCount != 0) Some((p / up, p.last)) + else None + } + } +} diff --git a/os/test/resources/restricted/File.txt b/os/test/resources/restricted/File.txt new file mode 100644 index 00000000..c295cb70 --- /dev/null +++ b/os/test/resources/restricted/File.txt @@ -0,0 +1 @@ +I am a restricted cow \ No newline at end of file diff --git a/os/test/resources/restricted/Multi Line.txt b/os/test/resources/restricted/Multi Line.txt new file mode 100644 index 00000000..03a7b2c7 --- /dev/null +++ b/os/test/resources/restricted/Multi Line.txt @@ -0,0 +1,4 @@ +I am restricted cow +Hear me moo +I weigh twice as much as you +And I look good on the barbecue \ No newline at end of file diff --git a/os/test/resources/restricted/folder1/one.txt b/os/test/resources/restricted/folder1/one.txt new file mode 100644 index 00000000..7959e0c6 --- /dev/null +++ b/os/test/resources/restricted/folder1/one.txt @@ -0,0 +1 @@ +Contents of restricted folder one \ No newline at end of file diff --git a/os/test/resources/restricted/folder2/nestedA/a.txt b/os/test/resources/restricted/folder2/nestedA/a.txt new file mode 100644 index 00000000..27ce3da0 --- /dev/null +++ b/os/test/resources/restricted/folder2/nestedA/a.txt @@ -0,0 +1 @@ +Contents of restricted nested A \ No newline at end of file diff --git a/os/test/resources/restricted/folder2/nestedB/b.txt b/os/test/resources/restricted/folder2/nestedB/b.txt new file mode 100644 index 00000000..f7539d86 --- /dev/null +++ b/os/test/resources/restricted/folder2/nestedB/b.txt @@ -0,0 +1 @@ +Contents of restricted nested B \ No newline at end of file diff --git a/os/test/resources/restricted/misc/broken-symlink b/os/test/resources/restricted/misc/broken-symlink new file mode 100644 index 00000000..e69de29b diff --git a/os/test/resources/restricted/misc/file-symlink b/os/test/resources/restricted/misc/file-symlink new file mode 100644 index 00000000..c295cb70 --- /dev/null +++ b/os/test/resources/restricted/misc/file-symlink @@ -0,0 +1 @@ +I am a restricted cow \ No newline at end of file diff --git a/os/test/resources/restricted/misc/folder-symlink b/os/test/resources/restricted/misc/folder-symlink new file mode 120000 index 00000000..6ff69ba0 --- /dev/null +++ b/os/test/resources/restricted/misc/folder-symlink @@ -0,0 +1 @@ +../folder1 \ No newline at end of file diff --git a/os/test/src-jvm/ExampleTests.scala b/os/test/src-jvm/ExampleTests.scala index fbb2335e..cfc38f9d 100644 --- a/os/test/src-jvm/ExampleTests.scala +++ b/os/test/src-jvm/ExampleTests.scala @@ -9,7 +9,7 @@ object ExampleTests extends TestSuite { test("splash") - TestUtil.prep { wd => if (Unix()) { // Make sure working directory exists and is empty - val wd = os.pwd / "out" / "splash" + val wd = os.pwd / "out/splash" os.remove.all(wd) os.makeDir.all(wd) @@ -95,8 +95,8 @@ object ExampleTests extends TestSuite { // ignore multiline (second file) because its size varies largestThree.filterNot(_._2.last == "Multi Line.txt") ==> Seq( - (711, wd / "misc" / "binary.png"), - (22, wd / "folder1" / "one.txt") + (711, wd / "misc/binary.png"), + (22, wd / "folder1/one.txt") ) } @@ -115,9 +115,9 @@ object ExampleTests extends TestSuite { } test("comparison") { - 
os.remove.all(os.pwd / "out" / "scratch" / "folder" / "thing" / "file") + os.remove.all(os.pwd / "out/scratch/folder/thing/file") os.write( - os.pwd / "out" / "scratch" / "folder" / "thing" / "file", + os.pwd / "out/scratch/folder/thing/file", "Hello!", createFolders = true ) @@ -135,16 +135,16 @@ object ExampleTests extends TestSuite { } removeAll("out/scratch/folder/thing") - assert(os.list(os.pwd / "out" / "scratch" / "folder").toSeq == Nil) + assert(os.list(os.pwd / "out/scratch/folder").toSeq == Nil) os.write( - os.pwd / "out" / "scratch" / "folder" / "thing" / "file", + os.pwd / "out/scratch/folder/thing/file", "Hello!", createFolders = true ) - os.remove.all(os.pwd / "out" / "scratch" / "folder" / "thing") - assert(os.list(os.pwd / "out" / "scratch" / "folder").toSeq == Nil) + os.remove.all(os.pwd / "out/scratch/folder/thing") + assert(os.list(os.pwd / "out/scratch/folder").toSeq == Nil) } test("constructingPaths") { @@ -155,13 +155,13 @@ object ExampleTests extends TestSuite { val wd = os.pwd // A path nested inside `wd` - wd / "folder" / "file" + wd / "folder/file" // A path starting from the root - os.root / "folder" / "file" + os.root / "folder/file" // A path with spaces or other special characters - wd / "My Folder" / "My File.txt" + wd / "My Folder/My File.txt" // Up one level from the wd wd / os.up @@ -171,17 +171,19 @@ object ExampleTests extends TestSuite { } test("newPath") { - val target = os.pwd / "out" / "scratch" + val target = os.pwd / "out/scratch" + val target2: os.Path = "/out/scratch" // literal syntax } test("relPaths") { // The path "folder/file" - val rel1 = os.rel / "folder" / "file" - val rel2 = os.rel / "folder" / "file" + val rel1 = os.rel / "folder/file" + val rel2 = os.rel / "folder/file" + val rel3: os.RelPath = "folder/file" // literal syntax // The relative difference between two paths - val target = os.pwd / "out" / "scratch" / "file" - assert((target relativeTo os.pwd) == os.rel / "out" / "scratch" / "file") + val target = os.pwd / "out/scratch/file" + assert((target relativeTo os.pwd) == os.rel / "out/scratch/file") // `up`s get resolved automatically val minus = os.pwd relativeTo target @@ -195,66 +197,67 @@ object ExampleTests extends TestSuite { test("subPaths") { // The path "folder/file" - val sub1 = os.sub / "folder" / "file" - val sub2 = os.sub / "folder" / "file" + val sub1 = os.sub / "folder/file" + val sub2 = os.sub / "folder/file" + val sub3 = "folder/file" // literal syntax // The relative difference between two paths - val target = os.pwd / "out" / "scratch" / "file" - assert((target subRelativeTo os.pwd) == os.sub / "out" / "scratch" / "file") + val target = os.pwd / "out/scratch/file" + assert((target subRelativeTo os.pwd) == os.sub / "out/scratch/file") // Converting os.RelPath to os.SubPath - val rel3 = os.rel / "folder" / "file" - val sub3 = rel3.asSubPath + val rel3 = os.rel / "folder/file" + val sub4 = rel3.asSubPath // `up`s are not allowed in sub paths intercept[Exception](os.pwd subRelativeTo target) } test("relSubPathEquality") { assert( - (os.sub / "hello" / "world") == (os.rel / "hello" / "world"), + (os.sub / "hello/world") == (os.rel / "hello/world"), os.sub == os.rel ) } test("relPathCombine") { - val target = os.pwd / "out" / "scratch" / "file" + val target = os.pwd / "out/scratch/file" val rel = target relativeTo os.pwd - val newBase = os.root / "code" / "server" - assert(newBase / rel == os.root / "code" / "server" / "out" / "scratch" / "file") + val newBase = os.root / "code/server" + assert(newBase / rel == os.root 
/ "code/server/out/scratch/file") } test("subPathCombine") { - val target = os.pwd / "out" / "scratch" / "file" + val target = os.pwd / "out/scratch/file" val sub = target subRelativeTo os.pwd - val newBase = os.root / "code" / "server" + val newBase = os.root / "code/server" assert( - newBase / sub == os.root / "code" / "server" / "out" / "scratch" / "file", - sub / sub == os.sub / "out" / "scratch" / "file" / "out" / "scratch" / "file" + newBase / sub == os.root / "code/server/out/scratch/file", + sub / sub == os.sub / "out/scratch/file/out/scratch/file" ) } test("pathUp") { - val target = os.root / "out" / "scratch" / "file" - assert(target / os.up == os.root / "out" / "scratch") + val target = os.root / "out/scratch/file" + assert(target / os.up == os.root / "out/scratch") } test("relPathUp") { - val target = os.rel / "out" / "scratch" / "file" - assert(target / os.up == os.rel / "out" / "scratch") + val target = os.rel / "out/scratch/file" + assert(target / os.up == os.rel / "out/scratch") } test("relPathUp") { - val target = os.sub / "out" / "scratch" / "file" - assert(target / os.up == os.sub / "out" / "scratch") + val target = os.sub / "out/scratch/file" + assert(target / os.up == os.sub / "out/scratch") } test("canonical") { if (Unix()) { - assert((os.root / "folder" / "file" / os.up).toString == "/folder") + assert((os.root / "folder/file" / os.up).toString == "/folder") // not "/folder/file/.." - assert((os.rel / "folder" / "file" / os.up).toString == "folder") + assert((os.rel / "folder/file" / os.up).toString == "folder") // not "folder/file/.." } } test("findWc") { - val wd = os.pwd / "os" / "test" / "resources" / "test" + val wd = os.Path(sys.env("OS_TEST_RESOURCE_FOLDER")) / "test" // find . -name '*.txt' | xargs wc -l val lines = os.walk(wd) diff --git a/os/test/src-jvm/OpTestsJvmOnly.scala b/os/test/src-jvm/OpTestsJvmOnly.scala index 5952f695..1d516ec1 100644 --- a/os/test/src-jvm/OpTestsJvmOnly.scala +++ b/os/test/src-jvm/OpTestsJvmOnly.scala @@ -10,271 +10,85 @@ import java.nio.charset.Charset object OpTestsJvmOnly extends TestSuite { val tests = Tests { - val res = os.pwd / "os" / "test" / "resources" / "test" - - test("lsR") { - os.walk(res).foreach(println) - intercept[java.nio.file.NoSuchFileException]( - os.walk(os.pwd / "out" / "scratch" / "nonexistent") - ) - assert( - os.walk(res / "folder2" / "nestedB") == Seq(res / "folder2" / "nestedB" / "b.txt"), - os.walk(res / "folder2").toSet == Set( - res / "folder2" / "nestedA", - res / "folder2" / "nestedA" / "a.txt", - res / "folder2" / "nestedB", - res / "folder2" / "nestedB" / "b.txt" - ) - ) - } + val res = os.Path(sys.env("OS_TEST_RESOURCE_FOLDER")) / "test" + val testFolder = os.pwd / "out/scratch/test" test("lsRecPermissions") { if (Unix()) { - assert(os.walk(os.root / "var" / "run").nonEmpty) + assert(os.walk(os.root / "var/run").nonEmpty) } } test("readResource") { test("positive") { test("absolute") { - val contents = os.read(os.resource / "test" / "os" / "folder" / "file.txt") + val contents = os.read(os.resource / "test/os/folder/file.txt") assert(contents.contains("file contents lols")) val cl = getClass.getClassLoader - val contents2 = os.read(os.resource(cl) / "test" / "os" / "folder" / "file.txt") + val contents2 = os.read(os.resource(cl) / "test/os/folder/file.txt") assert(contents2.contains("file contents lols")) } test("relative") { val cls = classOf[_root_.test.os.Testing] - val contents = os.read(os.resource(cls) / "folder" / "file.txt") + val contents = os.read(os.resource(cls) / 
"folder/file.txt") assert(contents.contains("file contents lols")) - val contents2 = os.read(os.resource(getClass) / "folder" / "file.txt") + val contents2 = os.read(os.resource(getClass) / "folder/file.txt") assert(contents2.contains("file contents lols")) } } test("negative") { test - intercept[os.ResourceNotFoundException] { - os.read(os.resource / "folder" / "file.txt") + os.read(os.resource / "folder/file.txt") } test - intercept[os.ResourceNotFoundException] { os.read( - os.resource(classOf[_root_.test.os.Testing]) / "test" / "os" / "folder" / "file.txt" + os.resource(classOf[_root_.test.os.Testing]) / "test/os/folder/file.txt" ) } test - intercept[os.ResourceNotFoundException] { - os.read(os.resource(getClass) / "test" / "os" / "folder" / "file.txt") + os.read(os.resource(getClass) / "test/os/folder/file.txt") } test - intercept[os.ResourceNotFoundException] { - os.read(os.resource(getClass.getClassLoader) / "folder" / "file.txt") + os.read(os.resource(getClass.getClassLoader) / "folder/file.txt") } } } - test("Mutating") { - val testFolder = os.pwd / "out" / "scratch" / "test" - os.remove.all(testFolder) - os.makeDir.all(testFolder) - test("cp") { - val d = testFolder / "copying" - test("basic") { - assert( - !os.exists(d / "folder"), - !os.exists(d / "file") - ) - os.makeDir.all(d / "folder") - os.write(d / "file", "omg") - assert( - os.exists(d / "folder"), - os.exists(d / "file"), - os.read(d / "file") == "omg" - ) - os.copy(d / "folder", d / "folder2") - os.copy(d / "file", d / "file2") - - assert( - os.exists(d / "folder"), - os.exists(d / "file"), - os.read(d / "file") == "omg", - os.exists(d / "folder2"), - os.exists(d / "file2"), - os.read(d / "file2") == "omg" - ) - } - test("deep") { - os.write(d / "folderA" / "folderB" / "file", "Cow", createFolders = true) - os.copy(d / "folderA", d / "folderC") - assert(os.read(d / "folderC" / "folderB" / "file") == "Cow") - } - test("merging") { - val mergeDir = d / "merge" - os.write(mergeDir / "folderA" / "folderB" / "file", "Cow", createFolders = true) - os.write(mergeDir / "folderC" / "file", "moo", createFolders = true) - os.copy(mergeDir / "folderA", mergeDir / "folderC", mergeFolders = true) - assert(os.read(mergeDir / "folderC" / "folderB" / "file") == "Cow") - assert(os.read(mergeDir / "folderC" / "file") == "moo") - } - } - test("mv") { - test("basic") { - val d = testFolder / "moving" - os.makeDir.all(d / "folder") - assert(os.list(d) == Seq(d / "folder")) - os.move(d / "folder", d / "folder2") - assert(os.list(d) == Seq(d / "folder2")) - } - test("shallow") { - val d = testFolder / "moving2" - os.makeDir(d) - os.write(d / "A.scala", "AScala") - os.write(d / "B.scala", "BScala") - os.write(d / "A.py", "APy") - os.write(d / "B.py", "BPy") - def fileSet = os.list(d).map(_.last).toSet - assert(fileSet == Set("A.scala", "B.scala", "A.py", "B.py")) - test("partialMoves") { - os.list(d).collect(os.move.matching { case p / g"$x.scala" => p / g"$x.java" }) - assert(fileSet == Set("A.java", "B.java", "A.py", "B.py")) - os.list(d).collect(os.move.matching { case p / g"A.$x" => p / g"C.$x" }) - assert(fileSet == Set("C.java", "B.java", "C.py", "B.py")) - } - test("fullMoves") { - os.list(d).map(os.move.matching { case p / g"$x.$y" => p / g"$y.$x" }) - assert(fileSet == Set("scala.A", "scala.B", "py.A", "py.B")) - def die = os.list(d).map(os.move.matching { case p / g"A.$x" => p / g"C.$x" }) - intercept[MatchError] { die } - } - } - - test("deep") { - val d = testFolder / "moving2" - os.makeDir(d) - os.makeDir(d / "scala") - os.write(d 
/ "scala" / "A", "AScala") - os.write(d / "scala" / "B", "BScala") - os.makeDir(d / "py") - os.write(d / "py" / "A", "APy") - os.write(d / "py" / "B", "BPy") - test("partialMoves") { - os.walk(d).collect(os.move.matching { case d / "py" / x => d / x }) - assert( - os.walk(d).toSet == Set( - d / "py", - d / "scala", - d / "scala" / "A", - d / "scala" / "B", - d / "A", - d / "B" - ) - ) - } - test("fullMoves") { - def die = os.walk(d).map(os.move.matching { case d / "py" / x => d / x }) - intercept[MatchError] { die } - - os.walk(d).filter(os.isFile).map(os.move.matching { - case d / "py" / x => d / "scala" / "py" / x - case d / "scala" / x => d / "py" / "scala" / x - case d => println("NOT FOUND " + d); d - }) + test("charset") { + + val d = testFolder / "readWrite" + os.makeDir.all(d) + os.write.over(d / "charset.txt", "funcionó".getBytes(Charset.forName("Windows-1252"))) + assert(os.read.lines( + d / "charset.txt", + Charset.forName("Windows-1252") + ).head == "funcionó") + } - assert( - os.walk(d).toSet == Set( - d / "py", - d / "scala", - d / "py" / "scala", - d / "scala" / "py", - d / "scala" / "py" / "A", - d / "scala" / "py" / "B", - d / "py" / "scala" / "A", - d / "py" / "scala" / "B" - ) - ) - } - } - // ls! wd | mv* - } + test("listNonExistentFailure") - { + val d = testFolder / "readWrite" + intercept[nio.NoSuchFileException](os.list(d / "nonexistent")) + } - test("mkdirRm") { - test("singleFolder") { - val single = testFolder / "single" - os.makeDir.all(single / "inner") - assert(os.list(single) == Seq(single / "inner")) - os.remove(single / "inner") - assert(os.list(single) == Seq()) - } - test("nestedFolders") { - val nested = testFolder / "nested" - os.makeDir.all(nested / "inner" / "innerer" / "innerest") - assert( - os.list(nested) == Seq(nested / "inner"), - os.list(nested / "inner") == Seq(nested / "inner" / "innerer"), - os.list(nested / "inner" / "innerer") == Seq(nested / "inner" / "innerer" / "innerest") + // Not sure why this doesn't work on native + test("redirectSubprocessInheritedOutput") { + if (Unix()) { // relies on bash scripts that don't run on windows + val scriptFolder = os.Path(sys.env("OS_TEST_RESOURCE_FOLDER")) / "test" + val lines = collection.mutable.Buffer.empty[String] + os.Inherit.out.withValue(os.ProcessOutput.Readlines(lines.append(_))) { + // Redirected + os.proc(scriptFolder / "misc/echo_with_wd", "HELLO\nWorld").call( + cwd = os.root / "usr", + stdout = os.Inherit + ) + // Not Redirected + os.proc(scriptFolder / "misc/echo_with_wd", "hello\nWORLD").call( + cwd = os.root / "usr", + stdout = os.InheritRaw ) - os.remove.all(nested / "inner") - assert(os.list(nested) == Seq()) - } - } - - test("readWrite") { - val d = testFolder / "readWrite" - os.makeDir.all(d) - test("simple") { - os.write(d / "file", "i am a cow") - assert(os.read(d / "file") == "i am a cow") - } - test("autoMkdir") { - os.write(d / "folder" / "folder" / "file", "i am a cow", createFolders = true) - assert(os.read(d / "folder" / "folder" / "file") == "i am a cow") - } - test("binary") { - os.write(d / "file", Array[Byte](1, 2, 3, 4)) - assert(os.read(d / "file").toSeq == Array[Byte](1, 2, 3, 4).toSeq) - } - test("concatenating") { - os.write(d / "concat1", Seq("a", "b", "c")) - assert(os.read(d / "concat1") == "abc") - os.write(d / "concat2", Seq(Array[Byte](1, 2), Array[Byte](3, 4))) - assert(os.read.bytes(d / "concat2").toSeq == Array[Byte](1, 2, 3, 4).toSeq) - os.write(d / "concat3", geny.Generator(Array[Byte](1, 2), Array[Byte](3, 4))) - assert(os.read.bytes(d / 
"concat3").toSeq == Array[Byte](1, 2, 3, 4).toSeq) - } - test("writeAppend") { - os.write.append(d / "append.txt", "Hello") - assert(os.read(d / "append.txt") == "Hello") - os.write.append(d / "append.txt", " World") - assert(os.read(d / "append.txt") == "Hello World") - } - test("writeOver") { - os.write.over(d / "append.txt", "Hello") - assert(os.read(d / "append.txt") == "Hello") - os.write.over(d / "append.txt", " Wor") - assert(os.read(d / "append.txt") == " Wor") - } - test("charset") { - os.write.over(d / "charset.txt", "funcionó".getBytes(Charset.forName("Windows-1252"))) - assert(os.read.lines( - d / "charset.txt", - Charset.forName("Windows-1252") - ).head == "funcionó") - } - } - - test("Failures") { - val d = testFolder / "failures" - os.makeDir.all(d) - test("nonexistant") { - test - intercept[nio.NoSuchFileException](os.list(d / "nonexistent")) - test - intercept[nio.NoSuchFileException](os.read(d / "nonexistent")) - test - intercept[nio.NoSuchFileException](os.copy(d / "nonexistent", d / "yolo")) - test - intercept[nio.NoSuchFileException](os.move(d / "nonexistent", d / "yolo")) - } - test("collisions") { - os.makeDir.all(d / "folder") - os.write(d / "file", "lolol") - test - intercept[nio.FileAlreadyExistsException](os.move(d / "file", d / "folder")) - test - intercept[nio.FileAlreadyExistsException](os.copy(d / "file", d / "folder")) - test - intercept[nio.FileAlreadyExistsException](os.write(d / "file", "lols")) } + assert(lines == Seq("HELLO", "World /usr")) } } } diff --git a/os/test/src-jvm/PathTestsCustomFilesystem.scala b/os/test/src-jvm/PathTestsCustomFilesystem.scala index 3b05265f..89e02258 100644 --- a/os/test/src-jvm/PathTestsCustomFilesystem.scala +++ b/os/test/src-jvm/PathTestsCustomFilesystem.scala @@ -2,11 +2,12 @@ package test.os import utest._ import os._ + import java.util.HashMap -import java.nio.file.FileSystems +import java.nio.file.{FileAlreadyExistsException, FileSystem, FileSystems} import java.net.URI -import java.nio.file.FileSystem -import java.nio.file.Paths +import scala.util.{Failure, Try} +import scala.util.control.NonFatal object PathTestsCustomFilesystem extends TestSuite { @@ -15,7 +16,7 @@ object PathTestsCustomFilesystem extends TestSuite { path.toUri() } - def withCustomFs(f: FileSystem => Unit, fsUri: URI = customFsUri()): Unit = { + def withCustomFs[T](f: FileSystem => T, fsUri: URI = customFsUri()): T = { val uri = new URI("jar", fsUri.toString(), null); val env = new HashMap[String, String](); env.put("create", "true"); @@ -23,7 +24,7 @@ object PathTestsCustomFilesystem extends TestSuite { val p = os.root("/", fs) try { os.makeDir(p / "test") - os.makeDir(p / "test" / "dir") + os.makeDir(p / "test/dir") f(fs) } finally { cleanUpFs(fs, fsUri) @@ -39,7 +40,7 @@ object PathTestsCustomFilesystem extends TestSuite { test("customFilesystem") { test("createPath") { withCustomFs { fileSystem => - val p = os.root("/", fileSystem) / "test" / "dir" + val p = os.root("/", fileSystem) / "test/dir" assert(p.root == "/") assert(p.fileSystem == fileSystem) } @@ -54,7 +55,7 @@ object PathTestsCustomFilesystem extends TestSuite { } test("removeDir") { withCustomFs { fileSystem => - val p = os.root("/", fileSystem) / "test" / "dir" / "dir2" + val p = os.root("/", fileSystem) / "test/dir/dir2" os.makeDir.all(p) assert(os.exists(p)) os.remove.all(p) @@ -63,7 +64,7 @@ object PathTestsCustomFilesystem extends TestSuite { } test("failTemp") { withCustomFs { fileSystem => - val p = os.root("/", fileSystem) / "test" / "dir" + val p = os.root("/", fileSystem) 
/ "test/dir" intercept[UnsupportedOperationException] { os.temp.dir(dir = p) } @@ -71,7 +72,7 @@ object PathTestsCustomFilesystem extends TestSuite { } test("failProcCall") { withCustomFs { fileSystem => - val p = os.root("/", fileSystem) / "test" / "dir" + val p = os.root("/", fileSystem) / "test/dir" intercept[UnsupportedOperationException] { os.proc("echo", "hello").call(cwd = p) } @@ -79,30 +80,30 @@ object PathTestsCustomFilesystem extends TestSuite { } test("up") { withCustomFs { fileSystem => - val p = os.root("/", fileSystem) / "test" / "dir" + val p = os.root("/", fileSystem) / "test/dir" assert((p / os.up) == os.root("/", fileSystem) / "test") } } test("withRelPath") { withCustomFs { fileSystem => - val p = os.root("/", fileSystem) / "test" / "dir" + val p = os.root("/", fileSystem) / "test/dir" val rel = os.rel / os.up / "file.txt" - assert((p / rel) == os.root("/", fileSystem) / "test" / "file.txt") + assert((p / rel) == os.root("/", fileSystem) / "test/file.txt") } } test("withSubPath") { withCustomFs { fileSystem => - val p = os.root("/", fileSystem) / "test" / "dir" + val p = os.root("/", fileSystem) / "test/dir" val sub = os.sub / "file.txt" - assert((p / sub) == os.root("/", fileSystem) / "test" / "dir" / "file.txt") + assert((p / sub) == os.root("/", fileSystem) / "test/dir/file.txt") } } test("differentFsCompare") { withCustomFs { fs1 => withCustomFs( { fs2 => - val p1 = os.root("/", fs1) / "test" / "dir" - val p2 = os.root("/", fs2) / "test" / "dir" + val p1 = os.root("/", fs1) / "test/dir" + val p2 = os.root("/", fs2) / "test/dir" assert(p1 != p2) }, fsUri = customFsUri("bar.jar") @@ -113,8 +114,8 @@ object PathTestsCustomFilesystem extends TestSuite { withCustomFs { fs1 => withCustomFs( { fs2 => - val p1 = os.root("/", fs1) / "test" / "dir" - val p2 = os.root("/", fs2) / "test" / "dir" + val p1 = os.root("/", fs1) / "test/dir" + val p2 = os.root("/", fs2) / "test/dir" intercept[IllegalArgumentException] { p1.relativeTo(p2) } @@ -127,8 +128,8 @@ object PathTestsCustomFilesystem extends TestSuite { withCustomFs { fs1 => withCustomFs( { fs2 => - val p1 = os.root("/", fs1) / "test" / "dir" - val p2 = os.root("/", fs2) / "test" / "dir" + val p1 = os.root("/", fs1) / "test/dir" + val p2 = os.root("/", fs2) / "test/dir" intercept[IllegalArgumentException] { p1.subRelativeTo(p2) } @@ -144,14 +145,14 @@ object PathTestsCustomFilesystem extends TestSuite { test("customFilesystem") { test("writeAndRead") { withCustomFs { fileSystem => - val p = root("/", fileSystem) / "test" / "dir" + val p = root("/", fileSystem) / "test/dir" os.write(p / "file.txt", "Hello") assert(os.read(p / "file.txt") == "Hello") } } test("writeOver") { withCustomFs { fileSystem => - val p = os.root("/", fileSystem) / "test" / "dir" + val p = os.root("/", fileSystem) / "test/dir" os.write(p / "file.txt", "Hello World") os.write.over(p / "file.txt", "Hello World2") assert(os.read(p / "file.txt") == "Hello World2") @@ -159,7 +160,7 @@ object PathTestsCustomFilesystem extends TestSuite { } test("move") { withCustomFs { fileSystem => - val p = os.root("/", fileSystem) / "test" / "dir" + val p = os.root("/", fileSystem) / "test/dir" os.write(p / "file.txt", "Hello World") os.move(p / "file.txt", p / "file2.txt") assert(os.read(p / "file2.txt") == "Hello World") @@ -168,16 +169,59 @@ object PathTestsCustomFilesystem extends TestSuite { } test("copy") { withCustomFs { fileSystem => - val p = os.root("/", fileSystem) / "test" / "dir" + val p = os.root("/", fileSystem) / "test/dir" os.write(p / "file.txt", "Hello 
World") os.copy(p / "file.txt", p / "file2.txt") assert(os.read(p / "file2.txt") == "Hello World") assert(os.exists(p / "file.txt")) } } + test("copyAndMergeToRootDirectoryWithCreateFolders") { + withCustomFs { fileSystem => + val root = os.root("/", fileSystem) + val file = root / "test/dir/file.txt" + os.write(file, "Hello World") + os.copy(root / "test/dir", root, createFolders = true, mergeFolders = true) + assert(os.read(root / "file.txt") == "Hello World") + assert(os.exists(root / "file.txt")) + } + } + test("failMoveToRootDirectoryWithCreateFolders") { + withCustomFs { fileSystem => + val root = os.root("/", fileSystem) + // This should fail. Just test that it doesn't throw PathError.AbsolutePathOutsideRoot. + intercept[FileAlreadyExistsException] { + os.move(root / "test/dir", root, createFolders = true) + } + } + } + test("copyMatchingAndMergeToRootDirectory") { + withCustomFs { fileSystem => + val root = os.root("/", fileSystem) + val file = root / "test/dir/file.txt" + os.write(file, "Hello World") + os.list(root / "test").collect(os.copy.matching(mergeFolders = true) { + case p / "test" / _ => p + }) + assert(os.read(root / "file.txt") == "Hello World") + assert(os.exists(root / "file.txt")) + } + } + test("failMoveMatchingToRootDirectory") { + withCustomFs { fileSystem => + // can't use a `intercept`, see https://github.com/com-lihaoyi/os-lib/pull/267#issuecomment-2116131445 + Try { + os.list(os.root("/", fileSystem)).collect(os.move.matching { case p / "test" => p }) + } match { + case Failure(e @ (_: IllegalArgumentException | _: FileAlreadyExistsException)) + if !e.isInstanceOf[PathError.AbsolutePathOutsideRoot.type] => + e.getMessage + } + } + } test("remove") { withCustomFs { fileSystem => - val p = os.root("/", fileSystem) / "test" / "dir" + val p = os.root("/", fileSystem) / "test/dir" os.write(p / "file.txt", "Hello World") assert(os.exists(p / "file.txt")) os.remove(p / "file.txt") @@ -186,7 +230,7 @@ object PathTestsCustomFilesystem extends TestSuite { } test("removeAll") { withCustomFs { fileSystem => - val p = os.root("/", fileSystem) / "test" / "dir" + val p = os.root("/", fileSystem) / "test/dir" os.write(p / "file.txt", "Hello World") os.write(p / "file2.txt", "Hello World") os.remove.all(p) @@ -196,7 +240,7 @@ object PathTestsCustomFilesystem extends TestSuite { } test("failSymlink") { withCustomFs { fileSystem => - val p = os.root("/", fileSystem) / "test" / "dir" + val p = os.root("/", fileSystem) / "test/dir" os.write(p / "file.txt", "Hello World") intercept[UnsupportedOperationException] { os.symlink(p / "link", p / "file.txt") @@ -205,12 +249,12 @@ object PathTestsCustomFilesystem extends TestSuite { } test("walk") { withCustomFs { fileSystem => - val p = os.root("/", fileSystem) / "test" / "dir" + val p = os.root("/", fileSystem) / "test/dir" os.write(p / "file.txt", "Hello World") os.write(p / "file2.txt", "Hello World") os.write(p / "file3.txt", "Hello World") os.makeDir(p / "dir2") - os.write(p / "dir2" / "file.txt", "Hello World") + os.write(p / "dir2/file.txt", "Hello World") assert(os.walk(p).map(_.relativeTo(p)).toSet == Set( RelPath("file.txt"), diff --git a/os/test/src-jvm/ProcessPipelineTests.scala b/os/test/src-jvm/ProcessPipelineTests.scala index bfeb6326..389fcfc8 100644 --- a/os/test/src-jvm/ProcessPipelineTests.scala +++ b/os/test/src-jvm/ProcessPipelineTests.scala @@ -9,7 +9,7 @@ import TestUtil.prep import scala.util.Try object ProcessPipelineTests extends TestSuite { - val scriptFolder = pwd / "os" / "test" / "resources" / "scripts" + 
val scriptFolder = os.Path(sys.env("OS_TEST_RESOURCE_FOLDER")) / "scripts" lazy val scalaHome = sys.env("SCALA_HOME") diff --git a/os/test/src-jvm/SpawningSubprocessesNewTests.scala b/os/test/src-jvm/SpawningSubprocessesNewTests.scala new file mode 100644 index 00000000..a2086d17 --- /dev/null +++ b/os/test/src-jvm/SpawningSubprocessesNewTests.scala @@ -0,0 +1,262 @@ +package test.os + +import java.io.{BufferedReader, InputStreamReader} +import os.ProcessOutput + +import scala.collection.mutable +import test.os.TestUtil.prep +import utest._ + +import java.nio.channels.FileChannel +import java.nio.file.StandardOpenOption +import java.util + +object SpawningSubprocessesNewTests extends TestSuite { + + def tests = Tests { + test("call") { + test - prep { wd => + if (Unix()) { + val res = os.call(cmd = ("ls", wd / "folder2")) + + res.exitCode ==> 0 + + res.out.text() ==> + """nestedA + |nestedB + |""".stripMargin + + res.out.trim() ==> + """nestedA + |nestedB""".stripMargin + + res.out.lines() ==> Seq( + "nestedA", + "nestedB" + ) + + res.out.bytes + + val thrown = intercept[os.SubprocessException] { + os.call(cmd = ("ls", "doesnt-exist"), cwd = wd) + } + + assert(thrown.result.exitCode != 0) + + val fail = + os.call(cmd = ("ls", "doesnt-exist"), cwd = wd, check = false, stderr = os.Pipe) + + assert(fail.exitCode != 0) + + fail.out.text() ==> "" + + assert(fail.err.text().contains("No such file or directory")) + + // You can pass in data to a subprocess' stdin + val hash = os.call(cmd = ("shasum", "-a", "256"), stdin = "Hello World") + hash.out.trim() ==> "a591a6d40bf420404a011733cfb7b190d62c65bf0bcda32b57b277d9ad9f146e -" + + // Taking input from a file and directing output to another file + os.call(cmd = ("base64"), stdin = wd / "File.txt", stdout = wd / "File.txt.b64") + + os.read(wd / "File.txt.b64") ==> "SSBhbSBjb3c=\n" + + if (false) { + os.call(cmd = ("vim"), stdin = os.Inherit, stdout = os.Inherit, stderr = os.Inherit) + } + } + } + test - prep { wd => + if (Unix()) { + val ex = intercept[os.SubprocessException] { + os.call(cmd = ("bash", "-c", "echo 123; sleep 10; echo 456"), timeout = 2000) + } + + ex.result.out.trim() ==> "123" + } + } + } + test("stream") { + test - prep { wd => + if (Unix()) { + var lineCount = 1 + os.call( + cmd = ("find", "."), + cwd = wd, + stdout = + os.ProcessOutput((buf, len) => lineCount += buf.slice(0, len).count(_ == '\n')) + ) + lineCount ==> 22 + } + } + test - prep { wd => + if (Unix()) { + var lineCount = 1 + os.call( + cmd = ("find", "."), + cwd = wd, + stdout = os.ProcessOutput.Readlines(line => lineCount += 1) + ) + lineCount ==> 22 + } + } + } + + test("spawn python") { + test - prep { wd => + if (TestUtil.isInstalled("python") && Unix()) { + // Start a long-lived python process which you can communicate with + val sub = os.spawn( + cmd = ( + "python", + "-u", + "-c", + if (TestUtil.isPython3()) "while True: print(eval(input()))" + else "while True: print(eval(raw_input()))" + ), + cwd = wd + ) + + // Sending some text to the subprocess + sub.stdin.write("1 + 2") + sub.stdin.writeLine("+ 4") + sub.stdin.flush() + sub.stdout.readLine() ==> "7" + + sub.stdin.write("'1' + '2'") + sub.stdin.writeLine("+ '4'") + sub.stdin.flush() + sub.stdout.readLine() ==> "124" + + // Sending some bytes to the subprocess + sub.stdin.write("1 * 2".getBytes) + sub.stdin.write("* 4\n".getBytes) + sub.stdin.flush() + sub.stdout.read() ==> '8'.toByte + + sub.destroy() + } + } + } + test("spawn curl") { + if ( + Unix() && // shasum seems to not accept stdin on Windows + 
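SpawningSubprocessesNewTests above exercises the newer top-level os.call and os.spawn entry points, which take the command as a tuple-valued cmd = (...) argument but otherwise mirror os.proc(...).call(...) and .spawn(...). A compact sketch of the correspondence (Unix command-line tools are assumed):

  // Old style: build an os.proc, then call it
  val r1 = os.proc("ls", os.pwd).call(check = false)

  // New style: one-shot os.call with named arguments
  val r2 = os.call(cmd = ("ls", os.pwd), check = false, stderr = os.Pipe)
  println(r2.out.lines())

  // Long-lived subprocess, new style; `cat` simply echoes its stdin back
  val sub = os.spawn(cmd = ("cat"))
  sub.stdin.writeLine("hello")
  sub.stdin.flush()
  assert(sub.stdout.readLine() == "hello")
  sub.destroy()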
TestUtil.isInstalled("curl") && + TestUtil.isInstalled("gzip") && + TestUtil.isInstalled("shasum") + ) { + // You can chain multiple subprocess' stdin/stdout together + val curl = + os.spawn(cmd = ("curl", "-L", ExampleResourcess.RemoteReadme.url), stderr = os.Inherit) + val gzip = os.spawn(cmd = ("gzip", "-n", "-6"), stdin = curl.stdout) + val sha = os.spawn(cmd = ("shasum", "-a", "256"), stdin = gzip.stdout) + sha.stdout.trim() ==> s"${ExampleResourcess.RemoteReadme.gzip6ShaSum256} -" + } + } + test("spawn callback") - prep { wd => + if (TestUtil.isInstalled("echo") && Unix()) { + val output: mutable.Buffer[String] = mutable.Buffer() + val sub = os.spawn( + cmd = ("echo", "output"), + stdout = ProcessOutput((bytes, count) => output += new String(bytes, 0, count)) + ) + val finished = sub.join(5000) + sub.wrapped.getOutputStream().flush() + assert(finished) + assert(sub.exitCode() == 0) + val expectedOutput = "output\n" + val actualOutput = output.mkString("") + assert(actualOutput == expectedOutput) + sub.destroy() + } + } + def tryLock(p: os.Path) = FileChannel + .open(p.toNIO, util.EnumSet.of(StandardOpenOption.READ, StandardOpenOption.WRITE)) + .tryLock() + def waitForLockTaken(p: os.Path) = { + while ({ + val waitLock = tryLock(p) + if (waitLock != null) { + waitLock.release() + true + } else false + }) Thread.sleep(1) + } + + test("destroy") { + if (Unix()) { + val temp1 = os.temp() + val sub1 = os.spawn((sys.env("TEST_SPAWN_EXIT_HOOK_ASSEMBLY"), temp1)) + waitForLockTaken(temp1) + sub1.destroy() + assert(!sub1.isAlive()) + + val temp2 = os.temp() + val sub2 = os.spawn((sys.env("TEST_SPAWN_EXIT_HOOK_ASSEMBLY"), temp2)) + waitForLockTaken(temp2) + sub2.destroy(async = true) + assert(sub2.isAlive()) + } + } + + test("spawnExitHook") { + test("destroyDefaultGrace") { + if (Unix()) { + val temp = os.temp() + val lock0 = tryLock(temp) + // file starts off not locked so can be taken and released + assert(lock0 != null) + lock0.release() + + val subprocess = os.spawn((sys.env("TEST_SPAWN_EXIT_HOOK_ASSEMBLY"), temp)) + waitForLockTaken(temp) + + subprocess.destroy() + // after calling destroy on the subprocess, the transitive subprocess + // should be killed by the exit hook, so the lock can now be taken + val lock = tryLock(temp) + assert(lock != null) + lock.release() + } + } + + test("destroyNoGrace") - retry(3) { + if (Unix()) { + val temp = os.temp() + val subprocess = os.spawn((sys.env("TEST_SPAWN_EXIT_HOOK_ASSEMBLY"), temp)) + waitForLockTaken(temp) + + subprocess.destroy(shutdownGracePeriod = 0) + // this should fail since the subprocess is shut down forcibly without grace period + // so there is no time for any exit hooks to run to shut down the transitive subprocess + val lock = tryLock(temp) + assert(lock == null) + } + } + + test("infiniteGrace") { + if (Unix()) { + val temp = os.temp() + val lock0 = tryLock(temp) + // file starts off not locked so can be taken and released + assert(lock0 != null) + lock0.release() + + // Force the subprocess exit to stall for 500ms + val subprocess = os.spawn((sys.env("TEST_SPAWN_EXIT_HOOK_ASSEMBLY"), temp, 500)) + waitForLockTaken(temp) + + val start = System.currentTimeMillis() + subprocess.destroy(shutdownGracePeriod = -1) + val end = System.currentTimeMillis() + // Because we set the shutdownGracePeriod to -1, it takes more than 500ms to shutdown, + // even though the default shutdown grace period is 100. 
But the sub-sub-process will + // have been shut down by the time the sub-process exits, so the lock is available + assert(end - start > 500) + val lock = tryLock(temp) + assert(lock != null) + } + } + } + } +} diff --git a/os/test/src-jvm/SpawningSubprocessesTests.scala b/os/test/src-jvm/SpawningSubprocessesTests.scala index 70140316..633a114f 100644 --- a/os/test/src-jvm/SpawningSubprocessesTests.scala +++ b/os/test/src-jvm/SpawningSubprocessesTests.scala @@ -11,159 +11,157 @@ import utest._ object SpawningSubprocessesTests extends TestSuite { def tests = Tests { - test("proc") { - test("call") { - test - prep { wd => - if (Unix()) { - val res = os.proc("ls", wd / "folder2").call() - - res.exitCode ==> 0 - - res.out.text() ==> - """nestedA - |nestedB - |""".stripMargin - - res.out.trim() ==> - """nestedA - |nestedB""".stripMargin - - res.out.lines() ==> Seq( - "nestedA", - "nestedB" - ) + test("call") { + test - prep { wd => + if (Unix()) { + val res = os.proc("ls", wd / "folder2").call() + + res.exitCode ==> 0 + + res.out.text() ==> + """nestedA + |nestedB + |""".stripMargin - res.out.bytes + res.out.trim() ==> + """nestedA + |nestedB""".stripMargin - val thrown = intercept[os.SubprocessException] { - os.proc("ls", "doesnt-exist").call(cwd = wd) - } + res.out.lines() ==> Seq( + "nestedA", + "nestedB" + ) + + res.out.bytes + + val thrown = intercept[os.SubprocessException] { + os.proc("ls", "doesnt-exist").call(cwd = wd) + } - assert(thrown.result.exitCode != 0) + assert(thrown.result.exitCode != 0) - val fail = os.proc("ls", "doesnt-exist").call(cwd = wd, check = false, stderr = os.Pipe) + val fail = os.proc("ls", "doesnt-exist").call(cwd = wd, check = false, stderr = os.Pipe) - assert(fail.exitCode != 0) + assert(fail.exitCode != 0) - fail.out.text() ==> "" + fail.out.text() ==> "" - assert(fail.err.text().contains("No such file or directory")) + assert(fail.err.text().contains("No such file or directory")) - // You can pass in data to a subprocess' stdin - val hash = os.proc("shasum", "-a", "256").call(stdin = "Hello World") - hash.out.trim() ==> "a591a6d40bf420404a011733cfb7b190d62c65bf0bcda32b57b277d9ad9f146e -" + // You can pass in data to a subprocess' stdin + val hash = os.proc("shasum", "-a", "256").call(stdin = "Hello World") + hash.out.trim() ==> "a591a6d40bf420404a011733cfb7b190d62c65bf0bcda32b57b277d9ad9f146e -" - // Taking input from a file and directing output to another file - os.proc("base64").call(stdin = wd / "File.txt", stdout = wd / "File.txt.b64") + // Taking input from a file and directing output to another file + os.proc("base64").call(stdin = wd / "File.txt", stdout = wd / "File.txt.b64") - os.read(wd / "File.txt.b64") ==> "SSBhbSBjb3c=\n" + os.read(wd / "File.txt.b64") ==> "SSBhbSBjb3c=\n" - if (false) { - os.proc("vim").call(stdin = os.Inherit, stdout = os.Inherit, stderr = os.Inherit) - } + if (false) { + os.proc("vim").call(stdin = os.Inherit, stdout = os.Inherit, stderr = os.Inherit) } } - test - prep { wd => - if (Unix()) { - val ex = intercept[os.SubprocessException] { - os.proc("bash", "-c", "echo 123; sleep 10; echo 456") - .call(timeout = 2000) - } - - ex.result.out.trim() ==> "123" + } + test - prep { wd => + if (Unix()) { + val ex = intercept[os.SubprocessException] { + os.proc("bash", "-c", "echo 123; sleep 10; echo 456") + .call(timeout = 2000) } + + ex.result.out.trim() ==> "123" } } - test("stream") { - test - prep { wd => - if (Unix()) { - var lineCount = 1 - os.proc("find", ".").call( - cwd = wd, - stdout = - os.ProcessOutput((buf, len) => 
lineCount += buf.slice(0, len).count(_ == '\n')) - ) - lineCount ==> 22 - } + } + test("stream") { + test - prep { wd => + if (Unix()) { + var lineCount = 1 + os.proc("find", ".").call( + cwd = wd, + stdout = + os.ProcessOutput((buf, len) => lineCount += buf.slice(0, len).count(_ == '\n')) + ) + lineCount ==> 22 } - test - prep { wd => - if (Unix()) { - var lineCount = 1 - os.proc("find", ".").call( - cwd = wd, - stdout = os.ProcessOutput.Readlines(line => lineCount += 1) - ) - lineCount ==> 22 - } + } + test - prep { wd => + if (Unix()) { + var lineCount = 1 + os.proc("find", ".").call( + cwd = wd, + stdout = os.ProcessOutput.Readlines(line => lineCount += 1) + ) + lineCount ==> 22 } } + } - test("spawn python") { - test - prep { wd => - if (TestUtil.isInstalled("python") && Unix()) { - // Start a long-lived python process which you can communicate with - val sub = os.proc( - "python", - "-u", - "-c", - if (TestUtil.isPython3()) "while True: print(eval(input()))" - else "while True: print(eval(raw_input()))" - ) - .spawn(cwd = wd) - - // Sending some text to the subprocess - sub.stdin.write("1 + 2") - sub.stdin.writeLine("+ 4") - sub.stdin.flush() - sub.stdout.readLine() ==> "7" - - sub.stdin.write("'1' + '2'") - sub.stdin.writeLine("+ '4'") - sub.stdin.flush() - sub.stdout.readLine() ==> "124" - - // Sending some bytes to the subprocess - sub.stdin.write("1 * 2".getBytes) - sub.stdin.write("* 4\n".getBytes) - sub.stdin.flush() - sub.stdout.read() ==> '8'.toByte - - sub.destroy() - } + test("spawn python") { + test - prep { wd => + if (TestUtil.isInstalled("python") && Unix()) { + // Start a long-lived python process which you can communicate with + val sub = os.proc( + "python", + "-u", + "-c", + if (TestUtil.isPython3()) "while True: print(eval(input()))" + else "while True: print(eval(raw_input()))" + ) + .spawn(cwd = wd) + + // Sending some text to the subprocess + sub.stdin.write("1 + 2") + sub.stdin.writeLine("+ 4") + sub.stdin.flush() + sub.stdout.readLine() ==> "7" + + sub.stdin.write("'1' + '2'") + sub.stdin.writeLine("+ '4'") + sub.stdin.flush() + sub.stdout.readLine() ==> "124" + + // Sending some bytes to the subprocess + sub.stdin.write("1 * 2".getBytes) + sub.stdin.write("* 4\n".getBytes) + sub.stdin.flush() + sub.stdout.read() ==> '8'.toByte + + sub.destroy() } } - test("spawn curl") { - if ( - Unix() && // shasum seems to not accept stdin on Windows - TestUtil.isInstalled("curl") && - TestUtil.isInstalled("gzip") && - TestUtil.isInstalled("shasum") - ) { - // You can chain multiple subprocess' stdin/stdout together - val curl = - os.proc("curl", "-L", ExampleResourcess.RemoteReadme.url).spawn(stderr = os.Inherit) - val gzip = os.proc("gzip", "-n", "-6").spawn(stdin = curl.stdout) - val sha = os.proc("shasum", "-a", "256").spawn(stdin = gzip.stdout) - sha.stdout.trim() ==> s"${ExampleResourcess.RemoteReadme.gzip6ShaSum256} -" - } + } + test("spawn curl") { + if ( + Unix() && // shasum seems to not accept stdin on Windows + TestUtil.isInstalled("curl") && + TestUtil.isInstalled("gzip") && + TestUtil.isInstalled("shasum") + ) { + // You can chain multiple subprocess' stdin/stdout together + val curl = + os.proc("curl", "-L", ExampleResourcess.RemoteReadme.url).spawn(stderr = os.Inherit) + val gzip = os.proc("gzip", "-n", "-6").spawn(stdin = curl.stdout) + val sha = os.proc("shasum", "-a", "256").spawn(stdin = gzip.stdout) + sha.stdout.trim() ==> s"${ExampleResourcess.RemoteReadme.gzip6ShaSum256} -" } - test("spawn callback") { - test - prep { wd => - if 
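Both the old and the new subprocess suites chain spawned processes by feeding one process's stdout into the next one's stdin, as the curl | gzip | shasum example above does. A stripped-down sketch of that wiring with smaller stand-in commands (Unix tools assumed):

  // Equivalent of `echo hello | tr a-z A-Z`
  val echo = os.spawn(cmd = ("echo", "hello"))
  val tr = os.spawn(cmd = ("tr", "a-z", "A-Z"), stdin = echo.stdout)
  assert(tr.stdout.trim() == "HELLO")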
(TestUtil.isInstalled("echo") && Unix()) { - val output: mutable.Buffer[String] = mutable.Buffer() - val sub = os.proc("echo", "output") - .spawn(stdout = - ProcessOutput((bytes, count) => output += new String(bytes, 0, count)) - ) - val finished = sub.join(5000) - sub.wrapped.getOutputStream().flush() - assert(finished) - assert(sub.exitCode() == 0) - val expectedOutput = "output\n" - val actualOutput = output.mkString("") - assert(actualOutput == expectedOutput) - sub.destroy() - } + } + test("spawn callback") { + test - prep { wd => + if (TestUtil.isInstalled("echo") && Unix()) { + val output: mutable.Buffer[String] = mutable.Buffer() + val sub = os.proc("echo", "output") + .spawn(stdout = + ProcessOutput((bytes, count) => output += new String(bytes, 0, count)) + ) + val finished = sub.join(5000) + sub.wrapped.getOutputStream().flush() + assert(finished) + assert(sub.exitCode() == 0) + val expectedOutput = "output\n" + val actualOutput = output.mkString("") + assert(actualOutput == expectedOutput) + sub.destroy() } } } diff --git a/os/test/src-jvm/ZipOpJvmTests.scala b/os/test/src-jvm/ZipOpJvmTests.scala new file mode 100644 index 00000000..dfe4bad2 --- /dev/null +++ b/os/test/src-jvm/ZipOpJvmTests.scala @@ -0,0 +1,195 @@ +package test.os +import TestUtil.prep +import utest._ + +import java.nio.file.attribute.FileTime +import java.nio.file.{Files, Paths} +import java.util.zip.ZipFile +import scala.collection.JavaConverters._ + +object ZipOpJvmTests extends TestSuite { + + def tests = Tests { + + test("zipAndUnzipFolder") - prep { wd => + // Zipping files and folders in a new zip file + val zipFileName = "zip-file-test.zip" + val zipFile1: os.Path = os.zip( + dest = wd / zipFileName, + sources = Seq( + wd / "File.txt", + wd / "folder1" + ) + ) + // Adding files and folders to an existing zip file + os.zip( + dest = zipFile1, + sources = Seq( + wd / "folder2", + wd / "Multi Line.txt" + ) + ) + + // Unzip file to a destination folder + val unzippedFolder = os.unzip( + source = wd / zipFileName, + dest = wd / "unzipped folder" + ) + + val paths = os.walk(unzippedFolder) + val expected = Seq( + // Files get included in the zip root using their name + wd / "unzipped folder/File.txt", + wd / "unzipped folder/Multi Line.txt", + // Folder contents get included relative to the source root + wd / "unzipped folder/nestedA", + wd / "unzipped folder/nestedB", + wd / "unzipped folder/one.txt", + wd / "unzipped folder/nestedA/a.txt", + wd / "unzipped folder/nestedB/b.txt" + ) + assert(paths.sorted == expected) + } + + test("zipAndUnzipPreserveMtimes") - prep { wd => + // Create a file and set its modification time + val testFile = wd / "FileWithMtime.txt" + os.write(testFile, "Test content") + + // Use basic System.currentTimeMillis() for modification time + val originalMtime = System.currentTimeMillis() - (1 * 60 * 1000) // 1 minute ago + val path = Paths.get(testFile.toString) + Files.setLastModifiedTime(path, FileTime.fromMillis(originalMtime)) + + // Zipping the file with preserveMtimes = true + val zipFile = os.zip( + dest = wd / "zipWithMtimePreservation.zip", + sources = List(testFile), + preserveMtimes = true + ) + + val existingZipFile = new ZipFile(zipFile.toNIO.toFile) + val actualMTime = existingZipFile.entries().asScala.toList.head.getTime + + // Compare the original and actual modification times (in minutes) + assert((originalMtime / (1000 * 60)) == (actualMTime / (1000 * 60))) + } + + def zipAndUnzipDontPreserveMtimes(wd: os.Path, exerciseAppend: Boolean) = { + + val testFile = wd / 
"FileWithMtime.txt" + os.write.over(testFile, "Test content") + val testFile2 = wd / "FileWithMtime2.txt" + + val mtime1 = os.mtime(testFile) + + val zipFile1 = os.zip( + dest = wd / "zipWithoutMtimes1.zip", + sources = List(testFile), + preserveMtimes = false + ) + + if (exerciseAppend) { + + os.write.over(testFile2, "Test content2") + os.zip( + dest = wd / "zipWithoutMtimes1.zip", + sources = List(testFile2), + preserveMtimes = false + ) + } + + // Sleep a bit to make sure the mtime has time to change, since zip files may + // have a very coarse granulity of up to two seconds + // https://stackoverflow.com/questions/64048499/zipfile-lib-weird-behaviour-with-seconds-in-modified-time + Thread.sleep(5000) + os.write.over(testFile, "Test content") + + val mtime2 = os.mtime(testFile) + + val zipFile2 = os.zip( + dest = wd / "zipWithoutMtimes2.zip", + sources = List(testFile), + preserveMtimes = false + ) + + if (exerciseAppend) { + os.write.over(testFile2, "Test content2") + os.zip( + dest = wd / "zipWithoutMtimes2.zip", + sources = List(testFile2), + preserveMtimes = false + ) + } + + // Even though the mtimes of the two included files are different, the two + // final zip files end up being byte-for-byte the same because the mtimes get wiped + assert(mtime1 != mtime2) + assert(java.util.Arrays.equals(os.read.bytes(zipFile1), os.read.bytes(zipFile2))) + } + + test("zipAndUnzipDontPreserveMtimes") { + test("noAppend") - prep { wd => zipAndUnzipDontPreserveMtimes(wd, false) } + test("append") - prep { wd => zipAndUnzipDontPreserveMtimes(wd, true) } + } + + test("deletePatterns") - prep { wd => + val amxFile = "File.amx" + os.copy(wd / "File.txt", wd / amxFile) + + // Zipping files and folders in a new zip file + val zipFileName = "zipByDeletingCertainFiles.zip" + val zipFile1: os.Path = os.zip( + dest = wd / zipFileName, + sources = List( + wd / "File.txt", + wd / amxFile, + wd / "Multi Line.txt" + ) + ) + + os.zip( + dest = zipFile1, + deletePatterns = List(amxFile.r) + ) + + // Unzip file to check for contents + val outputZipFilePath = os.unzip( + zipFile1, + dest = wd / "zipByDeletingCertainFiles" + ) + val paths = os.walk(wd / "zipByDeletingCertainFiles").sorted + val expected = Seq( + outputZipFilePath / "File.txt", + outputZipFilePath / "Multi Line.txt" + ) + + assert(paths == expected) + } + + test("open") - prep { wd => + val zipFile = os.zip.open(wd / "zip-test.zip") + try { + os.copy(wd / "File.txt", zipFile / "File.txt") + os.copy(wd / "folder1", zipFile / "folder1") + os.copy(wd / "folder2", zipFile / "folder2") + } finally zipFile.close() + + val zipFile2 = os.zip.open(wd / "zip-test.zip") + try { + os.list(zipFile2) ==> Vector( + zipFile2 / "File.txt", + zipFile2 / "folder1", + zipFile2 / "folder2" + ) + os.remove.all(zipFile2 / "folder2") + os.remove(zipFile2 / "File.txt") + } finally zipFile2.close() + + val zipFile3 = os.zip.open(wd / "zip-test.zip") + try os.list(zipFile3) ==> Vector(zipFile3 / "folder1") + finally zipFile3.close() + + } + } +} diff --git a/os/test/src/CheckerTests.scala b/os/test/src/CheckerTests.scala new file mode 100644 index 00000000..64c3a3dd --- /dev/null +++ b/os/test/src/CheckerTests.scala @@ -0,0 +1,484 @@ +package test.os + +import test.os.TestUtil._ +import utest._ + +object CheckerTests extends TestSuite { + + def tests: Tests = Tests { + // restricted directory + val rd = os.Path(sys.env("OS_TEST_RESOURCE_FOLDER")) / "restricted" + + test("stat") { + test("mtime") - prepChecker { wd => + val before = os.mtime(rd / "File.txt") + 
intercept[WriteDenied] { + os.mtime.set(rd / "File.txt", 0) + } + os.mtime(rd / "File.txt") ==> before + + os.mtime.set(wd / "File.txt", 0) + os.mtime(wd / "File.txt") ==> 0 + + os.mtime.set(wd / "File.txt", 90000) + os.mtime(wd / "File.txt") ==> 90000 + os.mtime(wd / "misc/file-symlink") ==> 90000 + + os.mtime.set(wd / "misc/file-symlink", 70000) + os.mtime(wd / "File.txt") ==> 70000 + os.mtime(wd / "misc/file-symlink") ==> 70000 + assert(os.mtime(wd / "misc/file-symlink", followLinks = false) != 40000) + } + } + + test("perms") { + test - prepChecker { wd => + if (Unix()) { + val before = os.perms(rd / "File.txt") + intercept[WriteDenied] { + os.perms.set(rd / "File.txt", "rwxrwxrwx") + } + os.perms(rd / "File.txt") ==> before + + os.perms.set(wd / "File.txt", "rwxrwxrwx") + os.perms(wd / "File.txt").toString() ==> "rwxrwxrwx" + os.perms(wd / "File.txt").toInt() ==> Integer.parseInt("777", 8) + + os.perms.set(wd / "File.txt", Integer.parseInt("755", 8)) + os.perms(wd / "File.txt").toString() ==> "rwxr-xr-x" + + os.perms.set(wd / "File.txt", "r-xr-xr-x") + os.perms.set(wd / "File.txt", Integer.parseInt("555", 8)) + } + } + test("owner") - prepChecker { wd => + if (Unix()) { + // Only works as root :( + if (false) { + intercept[WriteDenied] { + os.owner.set(rd / "File.txt", "nobody") + } + + val originalOwner = os.owner(wd / "File.txt") + + os.owner.set(wd / "File.txt", "nobody") + os.owner(wd / "File.txt").getName ==> "nobody" + + os.owner.set(wd / "File.txt", originalOwner) + } + } + } + test("group") - prepChecker { wd => + if (Unix()) { + // Only works as root :( + if (false) { + intercept[WriteDenied] { + os.group.set(rd / "File.txt", "nobody") + } + + val originalGroup = os.group(wd / "File.txt") + + os.group.set(wd / "File.txt", "nobody") + os.group(wd / "File.txt").getName ==> "nobody" + + os.group.set(wd / "File.txt", originalGroup) + } + } + } + } + + test("move") - prepChecker { wd => + intercept[WriteDenied] { + os.move(rd / "folder1/one.txt", wd / "folder1/File.txt") + } + os.list(wd / "folder1") ==> Seq(wd / "folder1/one.txt") + + intercept[WriteDenied] { + os.move(wd / "folder1/one.txt", rd / "folder1/File.txt") + } + os.list(rd / "folder1") ==> Seq(rd / "folder1/one.txt") + + intercept[WriteDenied] { + os.move(wd / "folder2/nestedA", rd / "folder2/nestedC") + } + os.list(rd / "folder2") ==> Seq(rd / "folder2/nestedA", rd / "folder2/nestedB") + + os.list(wd / "folder1") ==> Seq(wd / "folder1/one.txt") + os.move(wd / "folder1/one.txt", wd / "folder1/first.txt") + os.list(wd / "folder1") ==> Seq(wd / "folder1/first.txt") + + os.list(wd / "folder2") ==> Seq(wd / "folder2/nestedA", wd / "folder2/nestedB") + os.move(wd / "folder2/nestedA", wd / "folder2/nestedC") + os.list(wd / "folder2") ==> Seq(wd / "folder2/nestedB", wd / "folder2/nestedC") + + os.read(wd / "File.txt") ==> "I am cow" + os.move(wd / "Multi Line.txt", wd / "File.txt", replaceExisting = true) + os.read(wd / "File.txt") ==> + """I am cow + |Hear me moo + |I weigh twice as much as you + |And I look good on the barbecue""".stripMargin + } + test("copy") - prepChecker { wd => + intercept[ReadDenied] { + os.copy(rd / "folder1/one.txt", wd / "folder1/File.txt") + } + os.list(wd / "folder1") ==> Seq(wd / "folder1/one.txt") + + intercept[WriteDenied] { + os.copy(wd / "folder1/one.txt", rd / "folder1/File.txt") + } + os.list(rd / "folder1") ==> Seq(rd / "folder1/one.txt") + + intercept[WriteDenied] { + os.copy(wd / "folder2/nestedA", rd / "folder2/nestedC") + } + os.list(rd / "folder2") ==> Seq(rd / "folder2/nestedA", rd 
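CheckerTests above verify that, once the harness installs a read/write checker (hidden behind the prepChecker helper from TestUtil), writes under the restricted resource directory fail with WriteDenied, reads out of it fail with ReadDenied, and the scratch working directory stays fully usable. A condensed sketch of that assertion pattern, written against the same test helpers (it assumes import test.os.TestUtil._ and utest's intercept in scope; the exception and helper names come from the test sources in this patch):

  prepChecker { wd =>
    val rd = os.Path(sys.env("OS_TEST_RESOURCE_FOLDER")) / "restricted"

    // Writes into the restricted tree are rejected and leave it untouched
    intercept[WriteDenied] { os.write(rd / "New File.txt", "nope") }
    assert(!os.exists(rd / "New File.txt"))

    // Reads out of the restricted tree are rejected as well
    intercept[ReadDenied] { os.read.inputStream(rd / "File.txt") }

    // The checked scratch directory behaves normally
    os.write(wd / "New File.txt", "ok")
    assert(os.read(wd / "New File.txt") == "ok")
  }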
/ "folder2/nestedB") + + os.list(wd / "folder1") ==> Seq(wd / "folder1/one.txt") + os.copy(wd / "folder1/one.txt", wd / "folder1/first.txt") + os.list(wd / "folder1") ==> Seq(wd / "folder1/first.txt", wd / "folder1/one.txt") + + os.list(wd / "folder2") ==> Seq(wd / "folder2/nestedA", wd / "folder2/nestedB") + os.copy(wd / "folder2/nestedA", wd / "folder2/nestedC") + os.list(wd / "folder2") ==> Seq( + wd / "folder2/nestedA", + wd / "folder2/nestedB", + wd / "folder2/nestedC" + ) + + os.read(wd / "File.txt") ==> "I am cow" + os.copy(wd / "Multi Line.txt", wd / "File.txt", replaceExisting = true) + os.read(wd / "File.txt") ==> + """I am cow + |Hear me moo + |I weigh twice as much as you + |And I look good on the barbecue""".stripMargin + } + test("makeDir") { + test - prepChecker { wd => + intercept[WriteDenied] { + os.makeDir(rd / "new_folder") + } + os.exists(rd / "new_folder") ==> false + + os.exists(wd / "new_folder") ==> false + os.makeDir(wd / "new_folder") + os.exists(wd / "new_folder") ==> true + } + test("all") - prepChecker { wd => + intercept[WriteDenied] { + os.makeDir.all(rd / "new_folder/inner/deep") + } + os.exists(rd / "new_folder") ==> false + + os.exists(wd / "new_folder") ==> false + os.makeDir.all(wd / "new_folder/inner/deep") + os.exists(wd / "new_folder/inner/deep") ==> true + } + } + test("remove") { + test - prepChecker { wd => + intercept[WriteDenied] { + os.remove(rd / "File.txt") + } + os.exists(rd / "File.txt") ==> true + + intercept[WriteDenied] { + os.remove(rd / "folder1") + } + os.list(rd / "folder1") ==> Seq(rd / "folder1/one.txt") + + Unchecked.scope(os.makeDir(rd / "folder"), os.remove(rd / "folder")) { + intercept[WriteDenied] { + os.remove(rd / "folder") + } + os.exists(rd / "folder") ==> true + } + os.exists(rd / "folder") ==> false + + os.exists(wd / "File.txt") ==> true + os.remove(wd / "File.txt") + os.exists(wd / "File.txt") ==> false + + os.exists(wd / "folder1/one.txt") ==> true + os.remove(wd / "folder1/one.txt") + os.remove(wd / "folder1") + os.exists(wd / "folder1/one.txt") ==> false + os.exists(wd / "folder1") ==> false + } + test("link") - prepChecker { wd => + intercept[WriteDenied] { + os.remove(rd / "misc/file-symlink") + } + os.exists(rd / "misc/file-symlink", followLinks = false) ==> true + + intercept[WriteDenied] { + os.remove(rd / "misc/folder-symlink") + } + os.exists(rd / "misc/folder-symlink", followLinks = false) ==> true + + intercept[WriteDenied] { + os.remove(rd / "misc/broken-symlink") + } + os.exists(rd / "misc/broken-symlink", followLinks = false) ==> true + os.exists(rd / "misc/broken-symlink") ==> true + + os.remove(wd / "misc/file-symlink") + os.exists(wd / "misc/file-symlink", followLinks = false) ==> false + os.exists(wd / "File.txt", followLinks = false) ==> true + + os.remove(wd / "misc/folder-symlink") + os.exists(wd / "misc/folder-symlink", followLinks = false) ==> false + os.exists(wd / "folder1", followLinks = false) ==> true + os.exists(wd / "folder1/one.txt", followLinks = false) ==> true + + os.remove(wd / "misc/broken-symlink") + os.exists(wd / "misc/broken-symlink", followLinks = false) ==> false + } + test("all") { + test - prepChecker { wd => + intercept[WriteDenied] { + os.remove.all(rd / "folder1") + } + os.list(rd / "folder1") ==> Seq(rd / "folder1/one.txt") + + os.exists(wd / "folder1/one.txt") ==> true + os.remove.all(wd / "folder1") + os.exists(wd / "folder1/one.txt") ==> false + os.exists(wd / "folder1") ==> false + } + test("link") - prepChecker { wd => + intercept[WriteDenied] { + os.remove.all(rd / 
"misc/file-symlink") + } + os.exists(rd / "misc/file-symlink", followLinks = false) ==> true + + intercept[WriteDenied] { + os.remove.all(rd / "misc/folder-symlink") + } + os.exists(rd / "misc/folder-symlink", followLinks = false) ==> true + + intercept[WriteDenied] { + os.remove.all(rd / "misc/broken-symlink") + } + os.exists(rd / "misc/broken-symlink", followLinks = false) ==> true + + os.remove.all(wd / "misc/file-symlink") + os.exists(wd / "misc/file-symlink", followLinks = false) ==> false + + os.remove.all(wd / "misc/folder-symlink") + os.exists(wd / "misc/folder-symlink", followLinks = false) ==> false + os.exists(wd / "folder1", followLinks = false) ==> true + os.exists(wd / "folder1/one.txt", followLinks = false) ==> true + + os.remove.all(wd / "misc/broken-symlink") + os.exists(wd / "misc/broken-symlink", followLinks = false) ==> false + } + } + } + test("hardlink") - prepChecker { wd => + intercept[ReadDenied] { + os.hardlink(wd / "Linked.txt", rd / "File.txt") + } + os.exists(wd / "Linked.txt") ==> false + + intercept[WriteDenied] { + os.hardlink(rd / "Linked.txt", wd / "File.txt") + } + os.exists(rd / "Linked.txt") ==> false + + os.hardlink(wd / "Linked.txt", wd / "File.txt") + os.exists(wd / "Linked.txt") + os.read(wd / "Linked.txt") ==> "I am cow" + os.isLink(wd / "Linked.txt") ==> false + } + test("symlink") - prepChecker { wd => + intercept[WriteDenied] { + os.symlink(rd / "Linked.txt", wd / "File.txt") + } + os.exists(rd / "Linked.txt") ==> false + + intercept[WriteDenied] { + os.symlink(rd / "Linked.txt", os.rel / "File.txt") + } + os.exists(rd / "Linked.txt") ==> false + + intercept[WriteDenied] { + os.symlink(rd / "LinkedFolder1", wd / "folder1") + } + os.exists(rd / "LinkedFolder1") ==> false + + intercept[WriteDenied] { + os.symlink(rd / "LinkedFolder2", os.rel / "folder1") + } + os.exists(rd / "LinkedFolder2") ==> false + + os.symlink(wd / "Linked.txt", wd / "File.txt") + os.read(wd / "Linked.txt") ==> "I am cow" + os.isLink(wd / "Linked.txt") ==> true + + os.symlink(wd / "Linked2.txt", os.rel / "File.txt") + os.read(wd / "Linked2.txt") ==> "I am cow" + os.isLink(wd / "Linked2.txt") ==> true + + os.symlink(wd / "LinkedFolder1", wd / "folder1") + os.walk(wd / "LinkedFolder1", followLinks = true) ==> Seq(wd / "LinkedFolder1/one.txt") + os.isLink(wd / "LinkedFolder1") ==> true + + os.symlink(wd / "LinkedFolder2", os.rel / "folder1") + os.walk(wd / "LinkedFolder2", followLinks = true) ==> Seq(wd / "LinkedFolder2/one.txt") + os.isLink(wd / "LinkedFolder2") ==> true + } + test("temp") { + test - prepChecker { wd => + val before = os.walk(rd) + intercept[WriteDenied] { + os.temp("default content", dir = rd) + } + os.walk(rd) ==> before + + val tempOne = os.temp("default content", dir = wd) + os.read(tempOne) ==> "default content" + os.write.over(tempOne, "Hello") + os.read(tempOne) ==> "Hello" + } + test("dir") - prepChecker { wd => + val before = os.walk(rd) + intercept[WriteDenied] { + os.temp.dir(dir = rd) + } + os.walk(rd) ==> before + + val tempDir = os.temp.dir(dir = wd) + os.list(tempDir) ==> Nil + os.write(tempDir / "file", "Hello") + os.list(tempDir) ==> Seq(tempDir / "file") + } + } + + test("read") { + test("inputStream") - prepChecker { wd => + os.exists(rd / "File.txt") ==> true + intercept[ReadDenied] { + os.read.inputStream(rd / "File.txt") + } + + val is = os.read.inputStream(wd / "File.txt") // ==> "I am cow" + is.read() ==> 'I' + is.read() ==> ' ' + is.read() ==> 'a' + is.read() ==> 'm' + is.read() ==> ' ' + is.read() ==> 'c' + is.read() ==> 'o' + 
is.read() ==> 'w' + is.read() ==> -1 + is.close() + } + } + test("write") { + test - prepChecker { wd => + intercept[WriteDenied] { + os.write(rd / "New File.txt", "New File Contents") + } + os.exists(rd / "New File.txt") ==> false + + os.write(wd / "New File.txt", "New File Contents") + os.read(wd / "New File.txt") ==> "New File Contents" + + os.write(wd / "NewBinary.bin", Array[Byte](0, 1, 2, 3)) + os.read.bytes(wd / "NewBinary.bin") ==> Array[Byte](0, 1, 2, 3) + } + test("outputStream") - prepChecker { wd => + intercept[WriteDenied] { + os.write.outputStream(rd / "New File.txt") + } + os.exists(rd / "New File.txt") ==> false + + val out = os.write.outputStream(wd / "New File.txt") + out.write('H') + out.write('e') + out.write('l') + out.write('l') + out.write('o') + out.close() + + os.read(wd / "New File.txt") ==> "Hello" + } + } + test("truncate") - prepChecker { wd => + intercept[WriteDenied] { + os.truncate(rd / "File.txt", 4) + } + Unchecked(os.read(rd / "File.txt")) ==> "I am a restricted cow" + + os.read(wd / "File.txt") ==> "I am cow" + + os.truncate(wd / "File.txt", 4) + os.read(wd / "File.txt") ==> "I am" + } + + test("zip") - prepChecker { wd => + intercept[WriteDenied] { + os.zip( + dest = rd / "zipped.zip", + sources = Seq( + wd / "File.txt", + wd / "folder1" + ) + ) + } + os.exists(rd / "zipped.zip") ==> false + + intercept[ReadDenied] { + os.zip( + dest = wd / "zipped.zip", + sources = Seq( + wd / "File.txt", + rd / "folder1" + ) + ) + } + os.exists(wd / "zipped.zip") ==> false + + val zipFile = os.zip( + wd / "zipped.zip", + Seq( + wd / "File.txt", + wd / "folder1" + ) + ) + + val unzipDir = os.unzip(zipFile, wd / "unzipped") + os.walk(unzipDir).sorted ==> Seq( + unzipDir / "File.txt", + unzipDir / "one.txt" + ) + } + test("unzip") - prepChecker { wd => + val zipFileName = "zipped.zip" + val zipFile: os.Path = os.zip( + dest = wd / zipFileName, + sources = Seq( + wd / "File.txt", + wd / "folder1" + ) + ) + + intercept[WriteDenied] { + os.unzip( + source = zipFile, + dest = rd / "unzipped" + ) + } + os.exists(rd / "unzipped") ==> false + + val unzipDir = os.unzip( + source = zipFile, + dest = wd / "unzipped" + ) + os.walk(unzipDir).length ==> 2 + } + } +} diff --git a/os/test/src/FilesystemMetadataTests.scala b/os/test/src/FilesystemMetadataTests.scala new file mode 100644 index 00000000..f5d654db --- /dev/null +++ b/os/test/src/FilesystemMetadataTests.scala @@ -0,0 +1,76 @@ +package test.os + +import test.os.TestUtil.prep +import utest._ + +object FilesystemMetadataTests extends TestSuite { + + // on unix it is 81 bytes, win adds 3 bytes (3 \r characters) + private val multilineSizes = Set[Long](81, 84) + + def tests = Tests { + test("stat") { + test - prep { wd => + os.stat(wd / "File.txt").size ==> 8 + assert(multilineSizes contains os.stat(wd / "Multi Line.txt").size) + os.stat(wd / "folder1").fileType ==> os.FileType.Dir + } +// test("full"){ +// test - prep{ wd => +// os.stat.full(wd / "File.txt").size ==> 8 +// assert(multilineSizes contains os.stat.full(wd / "Multi Line.txt").size) +// os.stat.full(wd / "folder1").fileType ==> os.FileType.Dir +// } +// } + } + test("isFile") { + test - prep { wd => + os.isFile(wd / "File.txt") ==> true + os.isFile(wd / "folder1") ==> false + + os.isFile(wd / "misc/file-symlink") ==> true + os.isFile(wd / "misc/folder-symlink") ==> false + os.isFile(wd / "misc/file-symlink", followLinks = false) ==> false + } + } + test("isDir") { + test - prep { wd => + os.isDir(wd / "File.txt") ==> false + os.isDir(wd / "folder1") ==> true + + 
os.isDir(wd / "misc/file-symlink") ==> false + os.isDir(wd / "misc/folder-symlink") ==> true + os.isDir(wd / "misc/folder-symlink", followLinks = false) ==> false + } + } + test("isLink") { + test - prep { wd => + os.isLink(wd / "misc/file-symlink") ==> true + os.isLink(wd / "misc/folder-symlink") ==> true + os.isLink(wd / "folder1") ==> false + } + } + test("size") { + test - prep { wd => + os.size(wd / "File.txt") ==> 8 + assert(multilineSizes contains os.size(wd / "Multi Line.txt")) + } + } + test("mtime") { + test - prep { wd => + os.mtime.set(wd / "File.txt", 0) + os.mtime(wd / "File.txt") ==> 0 + + os.mtime.set(wd / "File.txt", 90000) + os.mtime(wd / "File.txt") ==> 90000 + os.mtime(wd / "misc/file-symlink") ==> 90000 + + os.mtime.set(wd / "misc/file-symlink", 70000) + os.mtime(wd / "File.txt") ==> 70000 + os.mtime(wd / "misc/file-symlink") ==> 70000 + assert(os.mtime(wd / "misc/file-symlink", followLinks = false) != 40000) + + } + } + } +} diff --git a/os/test/src/FilesystemPermissionsTests.scala b/os/test/src/FilesystemPermissionsTests.scala new file mode 100644 index 00000000..32580dc8 --- /dev/null +++ b/os/test/src/FilesystemPermissionsTests.scala @@ -0,0 +1,54 @@ +package test.os + +import test.os.TestUtil.prep +import utest._ + +object FilesystemPermissionsTests extends TestSuite { + def tests = Tests { + test("perms") { + test - prep { wd => + if (Unix()) { + os.perms.set(wd / "File.txt", "rwxrwxrwx") + os.perms(wd / "File.txt").toString() ==> "rwxrwxrwx" + os.perms(wd / "File.txt").toInt() ==> Integer.parseInt("777", 8) + + os.perms.set(wd / "File.txt", Integer.parseInt("755", 8)) + os.perms(wd / "File.txt").toString() ==> "rwxr-xr-x" + + os.perms.set(wd / "File.txt", "r-xr-xr-x") + os.perms.set(wd / "File.txt", Integer.parseInt("555", 8)) + } + } + } + test("owner") { + test - prep { wd => + if (Unix()) { + // Only works as root :( + if (false) { + val originalOwner = os.owner(wd / "File.txt") + + os.owner.set(wd / "File.txt", "nobody") + os.owner(wd / "File.txt").getName ==> "nobody" + + os.owner.set(wd / "File.txt", originalOwner) + } + } + } + } + test("group") { + test - prep { wd => + if (Unix()) { + // Only works as root :( + if (false) { + val originalGroup = os.group(wd / "File.txt") + + os.group.set(wd / "File.txt", "nobody") + os.group(wd / "File.txt").getName ==> "nobody" + + os.group.set(wd / "File.txt", originalGroup) + } + } + } + } + } +} diff --git a/os/test/src/ListingWalkingTests.scala b/os/test/src/ListingWalkingTests.scala new file mode 100644 index 00000000..029e10ac --- /dev/null +++ b/os/test/src/ListingWalkingTests.scala @@ -0,0 +1,106 @@ +package test.os + +import test.os.TestUtil.prep +import utest._ + +object ListingWalkingTests extends TestSuite { + def tests = Tests { + test("list") { + test - prep { wd => + os.list(wd / "folder1") ==> Seq(wd / "folder1/one.txt") + os.list(wd / "folder2") ==> Seq( + wd / "folder2/nestedA", + wd / "folder2/nestedB" + ) + + os.list(wd / "misc/folder-symlink") ==> Seq( + wd / "misc/folder-symlink/one.txt" + ) + } + test("stream") { + test - prep { wd => + os.list.stream(wd / "folder2").count() ==> 2 + + // Streaming the listed files to the console + for (line <- os.list.stream(wd / "folder2")) { + println(line) + } + } + } + } + test("walk") { + test - prep { wd => + os.walk(wd / "folder1") ==> Seq(wd / "folder1/one.txt") + + os.walk(wd / "folder1", includeTarget = true) ==> Seq( + wd / "folder1", + wd / "folder1/one.txt" + ) + + os.walk(wd / "folder2").toSet ==> Set( + wd / "folder2/nestedA", + wd / 
"folder2/nestedA/a.txt", + wd / "folder2/nestedB", + wd / "folder2/nestedB/b.txt" + ) + + os.walk(wd / "folder2", preOrder = false).toSet ==> Set( + wd / "folder2/nestedA/a.txt", + wd / "folder2/nestedA", + wd / "folder2/nestedB/b.txt", + wd / "folder2/nestedB" + ) + + os.walk(wd / "folder2", maxDepth = 1).toSet ==> Set( + wd / "folder2/nestedA", + wd / "folder2/nestedB" + ) + + os.walk(wd / "folder2", skip = _.last == "nestedA").toSet ==> Set( + wd / "folder2/nestedB", + wd / "folder2/nestedB/b.txt" + ) + + os.walk(wd / "misc/folder-symlink").toSet ==> Set( + wd / "misc/folder-symlink/one.txt" + ) + } + test("attrs") { + test - prep { wd => + if (Unix()) { + val filesSortedBySize = os.walk.attrs(wd / "misc", followLinks = true) + .sortBy { case (p, attrs) => attrs.size } + .collect { case (p, attrs) if attrs.isFile => p } + + filesSortedBySize ==> Seq( + wd / "misc/echo", + wd / "misc/file-symlink", + wd / "misc/echo_with_wd", + wd / "misc/folder-symlink/one.txt", + wd / "misc/binary.png" + ) + } + } + } + test("stream") { + test - prep { wd => + os.walk.stream(wd / "folder1").count() ==> 1 + + os.walk.stream(wd / "folder2").count() ==> 4 + + os.walk.stream(wd / "folder2", skip = _.last == "nestedA").count() ==> 2 + } + test("attrs") { + test - prep { wd => + def totalFileSizes(p: os.Path) = os.walk.stream.attrs(p) + .collect { case (p, attrs) if attrs.isFile => attrs.size } + .sum + + totalFileSizes(wd / "folder1") ==> 22 + totalFileSizes(wd / "folder2") ==> 40 + } + } + } + } + } +} diff --git a/os/test/src/ManipulatingFilesFoldersTests.scala b/os/test/src/ManipulatingFilesFoldersTests.scala new file mode 100644 index 00000000..60627a2d --- /dev/null +++ b/os/test/src/ManipulatingFilesFoldersTests.scala @@ -0,0 +1,277 @@ +package test.os + +import test.os.TestUtil.prep +import utest._ + +object ManipulatingFilesFoldersTests extends TestSuite { + def tests = Tests { + test("exists") { + test - prep { wd => + os.exists(wd / "File.txt") ==> true + os.exists(wd / "folder1") ==> true + os.exists(wd / "doesnt-exist") ==> false + + os.exists(wd / "misc/file-symlink") ==> true + os.exists(wd / "misc/folder-symlink") ==> true + os.exists(wd / "misc/broken-symlink") ==> false + os.exists(wd / "misc/broken-symlink", followLinks = false) ==> true + } + } + test("move") { + test - prep { wd => + os.list(wd / "folder1") ==> Seq(wd / "folder1/one.txt") + os.move(wd / "folder1/one.txt", wd / "folder1/first.txt") + os.list(wd / "folder1") ==> Seq(wd / "folder1/first.txt") + + os.list(wd / "folder2") ==> Seq(wd / "folder2/nestedA", wd / "folder2/nestedB") + os.move(wd / "folder2/nestedA", wd / "folder2/nestedC") + os.list(wd / "folder2") ==> Seq(wd / "folder2/nestedB", wd / "folder2/nestedC") + + os.read(wd / "File.txt") ==> "I am cow" + os.move(wd / "Multi Line.txt", wd / "File.txt", replaceExisting = true) + os.read(wd / "File.txt") ==> + """I am cow + |Hear me moo + |I weigh twice as much as you + |And I look good on the barbecue""".stripMargin + } + test("matching") { + test - prep { wd => + import os.{GlobSyntax, /} + os.walk(wd / "folder2").toSet ==> Set( + wd / "folder2/nestedA", + wd / "folder2/nestedA/a.txt", + wd / "folder2/nestedB", + wd / "folder2/nestedB/b.txt" + ) + + os.walk(wd / "folder2").collect(os.move.matching { case p / g"$x.txt" => p / g"$x.data" }) + + os.walk(wd / "folder2").toSet ==> Set( + wd / "folder2/nestedA", + wd / "folder2/nestedA/a.data", + wd / "folder2/nestedB", + wd / "folder2/nestedB/b.data" + ) + } + } + test("into") { + test - prep { wd => + os.list(wd / "folder1") 
==> Seq(wd / "folder1/one.txt") + os.move.into(wd / "File.txt", wd / "folder1") + os.list(wd / "folder1") ==> Seq(wd / "folder1/File.txt", wd / "folder1/one.txt") + } + } + test("over") { + test - prep { wd => + os.list(wd / "folder2") ==> Seq(wd / "folder2/nestedA", wd / "folder2/nestedB") + os.move.over(wd / "folder1", wd / "folder2") + os.list(wd / "folder2") ==> Seq(wd / "folder2/one.txt") + } + } + } + test("copy") { + test - prep { wd => + os.list(wd / "folder1") ==> Seq(wd / "folder1/one.txt") + os.copy(wd / "folder1/one.txt", wd / "folder1/first.txt") + os.list(wd / "folder1") ==> Seq(wd / "folder1/first.txt", wd / "folder1/one.txt") + + os.list(wd / "folder2") ==> Seq(wd / "folder2/nestedA", wd / "folder2/nestedB") + os.copy(wd / "folder2/nestedA", wd / "folder2/nestedC") + os.list(wd / "folder2") ==> Seq( + wd / "folder2/nestedA", + wd / "folder2/nestedB", + wd / "folder2/nestedC" + ) + + os.read(wd / "File.txt") ==> "I am cow" + os.copy(wd / "Multi Line.txt", wd / "File.txt", replaceExisting = true) + os.read(wd / "File.txt") ==> + """I am cow + |Hear me moo + |I weigh twice as much as you + |And I look good on the barbecue""".stripMargin + } + test("into") { + test - prep { wd => + os.list(wd / "folder1") ==> Seq(wd / "folder1/one.txt") + os.copy.into(wd / "File.txt", wd / "folder1") + os.list(wd / "folder1") ==> Seq(wd / "folder1/File.txt", wd / "folder1/one.txt") + } + } + test("over") { + test - prep { wd => + os.list(wd / "folder2") ==> Seq(wd / "folder2/nestedA", wd / "folder2/nestedB") + os.copy.over(wd / "folder1", wd / "folder2") + os.list(wd / "folder2") ==> Seq(wd / "folder2/one.txt") + } + } + test("symlinks") { + val src = os.temp.dir(deleteOnExit = true) + + os.makeDir(src / "t0") + os.write(src / "t0/file", "hello") + os.symlink(src / "t1", os.rel / "t0") + + val dest = os.temp.dir(deleteOnExit = true) + + os.copy(src / "t0", dest / "t0", followLinks = false, replaceExisting = false) + os.copy(src / "t1", dest / "t1", followLinks = false, replaceExisting = false) + + val src_list = os.walk(src, includeTarget = false, followLinks = false) + .map(_ relativeTo src) + .sorted + val dest_list = os.walk(dest, includeTarget = false, followLinks = false) + .map(_ relativeTo dest) + .sorted + + assert(dest_list == src_list) + + src_list.foreach { r => + val src_path = src / r + val dest_path = dest / r + + if (os.isFile(src_path, followLinks = false)) { + assert(os.isFile(dest_path, followLinks = false)) + assert(os.read(src_path) == os.read(dest_path)) + } else if (os.isLink(src_path)) { + assert(os.isLink(dest_path)) + assert(os.readLink(src_path) == os.readLink(dest_path)) + } else if (os.isDir(src_path, followLinks = false)) { + assert(os.isDir(dest_path, followLinks = false)) + val s = os.list(src_path, sort = true).map(_ relativeTo src).toList + val d = os.list(dest_path, sort = true).map(_ relativeTo dest).toList + assert(d == s) + } else { + assert(false) + } + + } + } + } + test("makeDir") { + test - prep { wd => + os.exists(wd / "new_folder") ==> false + os.makeDir(wd / "new_folder") + os.exists(wd / "new_folder") ==> true + } + test("all") { + test - prep { wd => + os.exists(wd / "new_folder") ==> false + os.makeDir.all(wd / "new_folder/inner/deep") + os.exists(wd / "new_folder/inner/deep") ==> true + } + } + } + test("remove") { + test - prep { wd => + os.exists(wd / "File.txt") ==> true + os.remove(wd / "File.txt") + os.exists(wd / "File.txt") ==> false + + os.exists(wd / "folder1/one.txt") ==> true + os.remove(wd / "folder1/one.txt") + os.remove(wd / 
"folder1") + os.exists(wd / "folder1/one.txt") ==> false + os.exists(wd / "folder1") ==> false + } + test("link") { + test - prep { wd => + os.remove(wd / "misc/file-symlink") + os.exists(wd / "misc/file-symlink", followLinks = false) ==> false + os.exists(wd / "File.txt", followLinks = false) ==> true + + os.remove(wd / "misc/folder-symlink") + os.exists(wd / "misc/folder-symlink", followLinks = false) ==> false + os.exists(wd / "folder1", followLinks = false) ==> true + os.exists(wd / "folder1/one.txt", followLinks = false) ==> true + + os.remove(wd / "misc/broken-symlink") + os.exists(wd / "misc/broken-symlink", followLinks = false) ==> false + } + } + test("all") { + test - prep { wd => + os.exists(wd / "folder1/one.txt") ==> true + os.remove.all(wd / "folder1") + os.exists(wd / "folder1/one.txt") ==> false + os.exists(wd / "folder1") ==> false + } + test("link") { + test - prep { wd => + os.remove.all(wd / "misc/file-symlink") + os.exists(wd / "misc/file-symlink", followLinks = false) ==> false + os.exists(wd / "File.txt", followLinks = false) ==> true + + os.remove.all(wd / "misc/folder-symlink") + os.exists(wd / "misc/folder-symlink", followLinks = false) ==> false + os.exists(wd / "folder1", followLinks = false) ==> true + os.exists(wd / "folder1/one.txt", followLinks = false) ==> true + + os.remove.all(wd / "misc/broken-symlink") + os.exists(wd / "misc/broken-symlink", followLinks = false) ==> false + } + } + } + } + test("hardlink") { + test - prep { wd => + os.hardlink(wd / "Linked.txt", wd / "File.txt") + os.exists(wd / "Linked.txt") + os.read(wd / "Linked.txt") ==> "I am cow" + os.isLink(wd / "Linked.txt") ==> false + } + } + test("symlink") { + test - prep { wd => + os.symlink(wd / "Linked.txt", wd / "File.txt") + os.exists(wd / "Linked.txt") + os.read(wd / "Linked.txt") ==> "I am cow" + os.isLink(wd / "Linked.txt") ==> true + + os.symlink(wd / "Linked2.txt", os.rel / "File.txt") + os.exists(wd / "Linked2.txt") + os.read(wd / "Linked2.txt") ==> "I am cow" + os.isLink(wd / "Linked2.txt") ==> true + } + } + test("followLink") { + test - prep { wd => + os.followLink(wd / "misc/file-symlink") ==> Some(wd / "File.txt") + os.followLink(wd / "misc/folder-symlink") ==> Some(wd / "folder1") + os.followLink(wd / "misc/broken-symlink") ==> None + } + } + test("readLink") { + test - prep { wd => + if (Unix()) { + os.readLink(wd / "misc/file-symlink") ==> os.up / "File.txt" + os.readLink(wd / "misc/folder-symlink") ==> os.up / "folder1" + os.readLink(wd / "misc/broken-symlink") ==> os.rel / "broken" + os.readLink(wd / "misc/broken-abs-symlink") ==> os.root / "doesnt/exist" + + os.readLink.absolute(wd / "misc/file-symlink") ==> wd / "File.txt" + os.readLink.absolute(wd / "misc/folder-symlink") ==> wd / "folder1" + os.readLink.absolute(wd / "misc/broken-symlink") ==> wd / "misc/broken" + os.readLink.absolute(wd / "misc/broken-abs-symlink") ==> os.root / "doesnt/exist" + } + } + } + test("temp") { + test - prep { wd => + val tempOne = os.temp("default content") + os.read(tempOne) ==> "default content" + os.write.over(tempOne, "Hello") + os.read(tempOne) ==> "Hello" + } + test("dir") { + test - prep { wd => + val tempDir = os.temp.dir() + os.list(tempDir) ==> Nil + os.write(tempDir / "file", "Hello") + os.list(tempDir) ==> Seq(tempDir / "file") + } + } + } + } +} diff --git a/os/test/src/OpTests.scala b/os/test/src/OpTests.scala index 212f4dc8..132ecbe8 100644 --- a/os/test/src/OpTests.scala +++ b/os/test/src/OpTests.scala @@ -5,10 +5,12 @@ import java.nio.{file => nio} import utest._ import 
os.{GlobSyntax, /} +import java.nio.charset.Charset + object OpTests extends TestSuite { val tests = Tests { - val res = os.pwd / "os" / "test" / "resources" / "test" + val res = os.Path(sys.env("OS_TEST_RESOURCE_FOLDER")) / "test" test("ls") - assert( os.list(res).toSet == Set( res / "folder1", @@ -19,20 +21,237 @@ object OpTests extends TestSuite { res / "Multi Line.txt" ), os.list(res / "folder2").toSet == Set( - res / "folder2" / "nestedA", - res / "folder2" / "nestedB" + res / "folder2/nestedA", + res / "folder2/nestedB" ) ) test("rm") { // shouldn't crash - os.remove.all(os.pwd / "out" / "scratch" / "nonexistent") + os.remove.all(os.pwd / "out/scratch/nonexistent") // shouldn't crash - os.remove(os.pwd / "out" / "scratch" / "nonexistent") ==> false + os.remove(os.pwd / "out/scratch/nonexistent") ==> false // should crash intercept[NoSuchFileException] { - os.remove(os.pwd / "out" / "scratch" / "nonexistent", checkExists = true) + os.remove(os.pwd / "out/scratch/nonexistent", checkExists = true) + } + } + test("lsR") { + os.walk(res).foreach(println) + intercept[java.nio.file.NoSuchFileException]( + os.walk(os.pwd / "out/scratch/nonexistent") + ) + assert( + os.walk(res / "folder2/nestedB") == Seq(res / "folder2/nestedB/b.txt"), + os.walk(res / "folder2").toSet == Set( + res / "folder2/nestedA", + res / "folder2/nestedA/a.txt", + res / "folder2/nestedB", + res / "folder2/nestedB/b.txt" + ) + ) + } + test("Mutating") { + val testFolder = os.pwd / "out/scratch/test" + os.remove.all(testFolder) + os.makeDir.all(testFolder) + test("cp") { + val d = testFolder / "copying" + test("basic") { + assert( + !os.exists(d / "folder"), + !os.exists(d / "file") + ) + os.makeDir.all(d / "folder") + os.write(d / "file", "omg") + assert( + os.exists(d / "folder"), + os.exists(d / "file"), + os.read(d / "file") == "omg" + ) + os.copy(d / "folder", d / "folder2") + os.copy(d / "file", d / "file2") + + assert( + os.exists(d / "folder"), + os.exists(d / "file"), + os.read(d / "file") == "omg", + os.exists(d / "folder2"), + os.exists(d / "file2"), + os.read(d / "file2") == "omg" + ) + } + test("deep") { + os.write(d / "folderA/folderB/file", "Cow", createFolders = true) + os.copy(d / "folderA", d / "folderC") + assert(os.read(d / "folderC/folderB/file") == "Cow") + } + test("merging") { + val mergeDir = d / "merge" + os.write(mergeDir / "folderA/folderB/file", "Cow", createFolders = true) + os.write(mergeDir / "folderC/file", "moo", createFolders = true) + os.copy(mergeDir / "folderA", mergeDir / "folderC", mergeFolders = true) + assert(os.read(mergeDir / "folderC/folderB/file") == "Cow") + assert(os.read(mergeDir / "folderC/file") == "moo") + } + } + test("mv") { + test("basic") { + val d = testFolder / "moving" + os.makeDir.all(d / "folder") + assert(os.list(d) == Seq(d / "folder")) + os.move(d / "folder", d / "folder2") + assert(os.list(d) == Seq(d / "folder2")) + } + test("shallow") { + val d = testFolder / "moving2" + os.makeDir(d) + os.write(d / "A.scala", "AScala") + os.write(d / "B.scala", "BScala") + os.write(d / "A.py", "APy") + os.write(d / "B.py", "BPy") + def fileSet = os.list(d).map(_.last).toSet + assert(fileSet == Set("A.scala", "B.scala", "A.py", "B.py")) + test("partialMoves") { + os.list(d).collect(os.move.matching { case p / g"$x.scala" => p / g"$x.java" }) + assert(fileSet == Set("A.java", "B.java", "A.py", "B.py")) + os.list(d).collect(os.move.matching { case p / g"A.$x" => p / g"C.$x" }) + assert(fileSet == Set("C.java", "B.java", "C.py", "B.py")) + } + test("fullMoves") { + 
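        // `os.move.matching` is a partial function: `.collect` (used in "partialMoves" above) skips any
        // path it does not match, while `.map` applies it to every listed path, so a pattern that fails
        // to match is expected to blow up with a MatchError (checked by the `intercept` further down).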
os.list(d).map(os.move.matching { case p / g"$x.$y" => p / g"$y.$x" }) + assert(fileSet == Set("scala.A", "scala.B", "py.A", "py.B")) + def die = os.list(d).map(os.move.matching { case p / g"A.$x" => p / g"C.$x" }) + intercept[MatchError] { die } + } + } + + test("deep") { + val d = testFolder / "moving2" + os.makeDir(d) + os.makeDir(d / "scala") + os.write(d / "scala/A", "AScala") + os.write(d / "scala/B", "BScala") + os.makeDir(d / "py") + os.write(d / "py/A", "APy") + os.write(d / "py/B", "BPy") + test("partialMoves") { + os.walk(d).collect(os.move.matching { case d / "py" / x => d / x }) + assert( + os.walk(d).toSet == Set( + d / "py", + d / "scala", + d / "scala/A", + d / "scala/B", + d / "A", + d / "B" + ) + ) + } + test("fullMoves") { + def die = os.walk(d).map(os.move.matching { case d / "py" / x => d / x }) + intercept[MatchError] { die } + + os.walk(d).filter(os.isFile).map(os.move.matching { + case d / "py" / x => d / "scala/py" / x + case d / "scala" / x => d / "py/scala" / x + case d => println("NOT FOUND " + d); d + }) + + assert( + os.walk(d).toSet == Set( + d / "py", + d / "scala", + d / "py/scala", + d / "scala/py", + d / "scala/py/A", + d / "scala/py/B", + d / "py/scala/A", + d / "py/scala/B" + ) + ) + } + } + // ls! wd | mv* + } + + test("mkdirRm") { + test("singleFolder") { + val single = testFolder / "single" + os.makeDir.all(single / "inner") + assert(os.list(single) == Seq(single / "inner")) + os.remove(single / "inner") + assert(os.list(single) == Seq()) + } + test("nestedFolders") { + val nested = testFolder / "nested" + os.makeDir.all(nested / "inner/innerer/innerest") + assert( + os.list(nested) == Seq(nested / "inner"), + os.list(nested / "inner") == Seq(nested / "inner/innerer"), + os.list(nested / "inner/innerer") == Seq(nested / "inner/innerer/innerest") + ) + os.remove.all(nested / "inner") + assert(os.list(nested) == Seq()) + } + } + + test("readWrite") { + val d = testFolder / "readWrite" + os.makeDir.all(d) + test("simple") { + os.write(d / "file", "i am a cow") + assert(os.read(d / "file") == "i am a cow") + } + test("autoMkdir") { + os.write(d / "folder/folder/file", "i am a cow", createFolders = true) + assert(os.read(d / "folder/folder/file") == "i am a cow") + } + test("binary") { + os.write(d / "file", Array[Byte](1, 2, 3, 4)) + assert(os.read(d / "file").toSeq == Array[Byte](1, 2, 3, 4).toSeq) + } + test("concatenating") { + os.write(d / "concat1", Seq("a", "b", "c")) + assert(os.read(d / "concat1") == "abc") + os.write(d / "concat2", Seq(Array[Byte](1, 2), Array[Byte](3, 4))) + assert(os.read.bytes(d / "concat2").toSeq == Array[Byte](1, 2, 3, 4).toSeq) + os.write(d / "concat3", geny.Generator(Array[Byte](1, 2), Array[Byte](3, 4))) + assert(os.read.bytes(d / "concat3").toSeq == Array[Byte](1, 2, 3, 4).toSeq) + } + test("writeAppend") { + os.write.append(d / "append.txt", "Hello") + assert(os.read(d / "append.txt") == "Hello") + os.write.append(d / "append.txt", " World") + assert(os.read(d / "append.txt") == "Hello World") + } + test("writeOver") { + os.write.over(d / "append.txt", "Hello") + assert(os.read(d / "append.txt") == "Hello") + os.write.over(d / "append.txt", " Wor") + assert(os.read(d / "append.txt") == " Wor") + } + + } + + test("Failures") { + val d = testFolder / "failures" + os.makeDir.all(d) + test("nonexistant") { + + test - intercept[nio.NoSuchFileException](os.read(d / "nonexistent")) + test - intercept[nio.NoSuchFileException](os.copy(d / "nonexistent", d / "yolo")) + test - intercept[nio.NoSuchFileException](os.move(d / 
"nonexistent", d / "yolo")) + } + test("collisions") { + os.makeDir.all(d / "folder") + os.write(d / "file", "lolol") + test - intercept[nio.FileAlreadyExistsException](os.move(d / "file", d / "folder")) + test - intercept[nio.FileAlreadyExistsException](os.copy(d / "file", d / "folder")) + test - intercept[nio.FileAlreadyExistsException](os.write(d / "file", "lols")) + } } } + } } diff --git a/os/test/src/PathTests.scala b/os/test/src/PathTests.scala index 98aa6ae4..85a27611 100644 --- a/os/test/src/PathTests.scala +++ b/os/test/src/PathTests.scala @@ -2,16 +2,140 @@ package test.os import java.nio.file.Paths import java.io.File - import os._ -import os.Path.{driveRoot} +import os.Path.driveRoot import utest.{assert => _, _} + import java.net.URI object PathTests extends TestSuite { + private def nonCanonicalLiteral(providedLiteral: String, sanitizedLiteral: String) = + s"Literal path sequence [$providedLiteral] used in OS-Lib must be in a canonical form, please use [$sanitizedLiteral] instead" + private def removeLiteralErr(literal: String) = + s"Literal path sequence [$literal] doesn't affect path being formed, please remove it" + val tests = Tests { + test("Literals") { + test("implicitConstructors") { + test("valid") { + val p: os.Path = "/hello/world" + val s: os.SubPath = "hello/world" + val r: os.RelPath = "../hello/world" + assert(p == os.Path("/hello/world")) + assert(s == os.SubPath("hello/world")) + assert(r == os.RelPath("../hello/world")) + } + test("invalidLiteral") { + val err1 = compileError("""val p: os.Path = "hello/world" """) + assert(err1.msg.contains("Invalid absolute path literal: \"hello/world\"")) + + val err2 = compileError("""val s: os.SubPath = "../hello/world" """) + assert(err2.msg.contains("Invalid subpath literal: \"../hello/world\"")) + + val err3 = compileError("""val s: os.SubPath = "/hello/world" """) + assert(err3.msg.contains("Invalid subpath literal: \"/hello/world\"")) + + val err4 = compileError("""val r: os.RelPath = "/hello/world" """) + assert(err4.msg.contains("Invalid relative path literal: \"/hello/world\"")) + } + test("nonLiteral") { + val err1 = compileError("""val str = "hello/world"; val p: os.Path = str """) + assert(err1.msg.contains("Invalid absolute path literal: str")) + + val err2 = compileError("""val str = "/hello/world"; val s: os.SubPath = str """) + assert(err2.msg.contains("Invalid subpath literal: str")) + + val err3 = compileError("""val str = "/hello/world"; val r: os.RelPath = str""") + assert(err3.msg.contains("Invalid relative path literal: str")) + } + } + test("Basic") { + assert(rel / "src" / "Main/.scala" == rel / "src" / "Main" / ".scala") + assert(root / "core/src/test" == root / "core" / "src" / "test") + assert(root / "core/src/test" == root / "core" / "src/test") + } + test("literals with [..]") { + + assert(rel / "src" / ".." == rel / "src" / os.up) + assert(root / "src" / ".." == root / "src" / os.up) + assert(root / "hello" / ".." 
/ "world" == root / "hello" / os.up / "world") + assert(root / "hello" / "../world" == root / "hello" / os.up / "world") + } + + test("Compile errors") { + + compileError("""root / "src/../foo"""").check("", nonCanonicalLiteral("src/../foo", "foo")) + compileError("""root / "hello/../world"""").check( + "", + nonCanonicalLiteral("hello/../world", "world") + ) + compileError("""root / "src/../foo/bar"""").check( + "", + nonCanonicalLiteral("src/../foo/bar", "foo/bar") + ) + compileError("""root / "src/../foo/bar/.."""").check( + "", + nonCanonicalLiteral("src/../foo/bar/..", "foo") + ) + compileError("""root / "src/../foo/../bar/."""").check( + "", + nonCanonicalLiteral("src/../foo/../bar/.", "bar") + ) + compileError("""root / "src/foo/./.."""").check( + "", + nonCanonicalLiteral("src/foo/./..", "src") + ) + compileError("""root / "src/foo//./.."""").check( + "", + nonCanonicalLiteral("src/foo//./..", "src") + ) + + compileError("""root / "src/.."""").check("", removeLiteralErr("src/..")) + compileError("""root / "src/../foo/.."""").check("", removeLiteralErr("src/../foo/..")) + compileError("""root / "src/foo/../.."""").check("", removeLiteralErr("src/foo/../..")) + compileError("""root / "src/foo/./../.."""").check("", removeLiteralErr("src/foo/./../..")) + compileError("""root / "src/./foo/./../.."""").check( + "", + removeLiteralErr("src/./foo/./../..") + ) + compileError("""root / "src///foo/./../.."""").check( + "", + removeLiteralErr("src///foo/./../..") + ) + + compileError("""root / "/" """).check("", removeLiteralErr("/")) + compileError("""root / "/ " """).check("", nonCanonicalLiteral("/ ", " ")) + compileError("""root / " /" """).check("", nonCanonicalLiteral(" /", " ")) + compileError("""root / "//" """).check("", removeLiteralErr("//")) + + compileError("""root / "foo/" """).check("", nonCanonicalLiteral("foo/", "foo")) + compileError("""root / "foo//" """).check("", nonCanonicalLiteral("foo//", "foo")) + + compileError("""root / "foo/bar/" """).check("", nonCanonicalLiteral("foo/bar/", "foo/bar")) + compileError("""root / "foo/bar//" """).check( + "", + nonCanonicalLiteral("foo/bar//", "foo/bar") + ) + + compileError("""root / "/foo" """).check("", nonCanonicalLiteral("/foo", "foo")) + compileError("""root / "//foo" """).check("", nonCanonicalLiteral("//foo", "foo")) + + compileError("""root / "//foo/" """).check("", nonCanonicalLiteral("//foo/", "foo")) + + compileError(""" rel / "src" / "" """).check("", removeLiteralErr("")) + compileError(""" rel / "src" / "." """).check("", removeLiteralErr(".")) + + compileError(""" root / "src/" """).check("", nonCanonicalLiteral("src/", "src")) + compileError(""" root / "src/." """).check("", nonCanonicalLiteral("src/.", "src")) + + compileError(""" root / "" """).check("", removeLiteralErr("")) + compileError(""" root / "." 
""").check("", removeLiteralErr(".")) + + } + } test("Basic") { val base = rel / "src" / "main" / "scala" val subBase = sub / "src" / "main" / "scala" + test("Transform posix paths") { // verify posix string format of driveRelative path assert(posix(root / "omg") == posix(Paths.get("/omg").toAbsolutePath)) @@ -279,29 +403,37 @@ object PathTests extends TestSuite { } } test("Errors") { + def nonLiteral(s: String) = s + test("InvalidChars") { - val ex = intercept[PathError.InvalidSegment](rel / "src" / "Main/.scala") + val ex = intercept[PathError.InvalidSegment](rel / "src" / nonLiteral("Main/.scala")) val PathError.InvalidSegment("Main/.scala", msg1) = ex - assert(msg1.contains("[/] is not a valid character to appear in a path segment")) + assert( + msg1.contains( + "[/] is not a valid character to appear in a non-literal path segment. If you are " + + "dealing with dynamic path-strings coming from external sources, use the " + + "Path(...)/RelPath(...)/SubPath(...) constructor calls to convert them." + ) + ) - val ex2 = intercept[PathError.InvalidSegment](root / "hello" / ".." / "world") + val ex2 = intercept[PathError.InvalidSegment](root / "hello" / nonLiteral("..") / "world") val PathError.InvalidSegment("..", msg2) = ex2 assert(msg2.contains("use the `up` segment from `os.up`")) } test("InvalidSegments") { - intercept[PathError.InvalidSegment] { root / "core/src/test" } - intercept[PathError.InvalidSegment] { root / "" } - intercept[PathError.InvalidSegment] { root / "." } - intercept[PathError.InvalidSegment] { root / ".." } + intercept[PathError.InvalidSegment] { root / nonLiteral("core/src/test") } + intercept[PathError.InvalidSegment] { root / nonLiteral("") } + intercept[PathError.InvalidSegment] { root / nonLiteral(".") } + intercept[PathError.InvalidSegment] { root / nonLiteral("..") } } test("EmptySegment") { - intercept[PathError.InvalidSegment](rel / "src" / "") - intercept[PathError.InvalidSegment](rel / "src" / ".") - intercept[PathError.InvalidSegment](rel / "src" / "..") + intercept[PathError.InvalidSegment](rel / "src" / nonLiteral("")) + intercept[PathError.InvalidSegment](rel / "src" / nonLiteral(".")) + intercept[PathError.InvalidSegment](rel / "src" / nonLiteral("..")) } test("CannotRelativizeAbsAndRel") { val abs = pwd @@ -434,6 +566,14 @@ object PathTests extends TestSuite { System.err.printf("p[%s]\n", posix(p)) assert(posix(p) contains "/omg") } + test("dynamicPwd") { + val x = os.pwd + val y = os.dynamicPwd.withValue(os.pwd / "hello") { + os.pwd + } + + assert(x / "hello" == y) + } } // compare absolute paths def sameFile(a: java.nio.file.Path, b: java.nio.file.Path): Boolean = { diff --git a/os/test/src/PathTestsJvmOnly.scala b/os/test/src/PathTestsJvmOnly.scala new file mode 100644 index 00000000..fbb23ad7 --- /dev/null +++ b/os/test/src/PathTestsJvmOnly.scala @@ -0,0 +1,50 @@ +package test.os + +import java.nio.file.Paths + +import os._ +import utest._ +import java.util.HashMap +import java.nio.file.FileSystems +import java.net.URI + +object PathTestsJvmOnly extends TestSuite { + val tests = Tests { + test("construction") { + test("symlinks") { + + val names = Seq("test123", "test124", "test125", "test126") + val twd = temp.dir() + + test("nestedSymlinks") { + if (Unix()) { + names.foreach(p => os.remove.all(twd / p)) + os.makeDir.all(twd / "test123") + os.symlink(twd / "test124", twd / "test123") + os.symlink(twd / "test125", twd / "test124") + os.symlink(twd / "test126", twd / "test125") + assert(followLink(twd / "test126").get == followLink(twd / 
"test123").get) + names.foreach(p => os.remove(twd / p)) + names.foreach(p => assert(!exists(twd / p))) + } + } + + test("danglingSymlink") { + if (Unix()) { + names.foreach(p => os.remove.all(twd / p)) + os.makeDir.all(twd / "test123") + os.symlink(twd / "test124", twd / "test123") + os.symlink(twd / "test125", twd / "test124") + os.symlink(twd / "test126", twd / "test125") + os.remove(twd / "test123") + assert(followLink(twd / "test126").isEmpty) + names.foreach(p => os.remove.all(twd / p)) + names.foreach(p => assert(!exists(twd / p))) + names.foreach(p => os.remove.all(twd / p)) + names.foreach(p => assert(!exists(twd / p))) + } + } + } + } + } +} diff --git a/os/test/src/ReadingWritingTests.scala b/os/test/src/ReadingWritingTests.scala new file mode 100644 index 00000000..12d2b200 --- /dev/null +++ b/os/test/src/ReadingWritingTests.scala @@ -0,0 +1,137 @@ +package test.os +import utest._ +import TestUtil._ +object ReadingWritingTests extends TestSuite { + def tests = Tests { + test("read") { + test - prep { wd => + os.read(wd / "File.txt") ==> "I am cow" + os.read(wd / "folder1/one.txt") ==> "Contents of folder one" + os.read(wd / "Multi Line.txt") ==> + """I am cow + |Hear me moo + |I weigh twice as much as you + |And I look good on the barbecue""".stripMargin + } + test("inputStream") { + test - prep { wd => + val is = os.read.inputStream(wd / "File.txt") // ==> "I am cow" + is.read() ==> 'I' + is.read() ==> ' ' + is.read() ==> 'a' + is.read() ==> 'm' + is.read() ==> ' ' + is.read() ==> 'c' + is.read() ==> 'o' + is.read() ==> 'w' + is.read() ==> -1 + is.close() + } + } + test("bytes") { + test - prep { wd => + os.read.bytes(wd / "File.txt") ==> "I am cow".getBytes + os.read.bytes(wd / "misc/binary.png").length ==> 711 + } + } + test("chunks") { + test - prep { wd => + val chunks = os.read.chunks(wd / "File.txt", chunkSize = 2) + .map { case (buf, n) => buf.take(n).toSeq } // copy the buffer to save the data + .toSeq + + chunks ==> Seq( + Seq[Byte]('I', ' '), + Seq[Byte]('a', 'm'), + Seq[Byte](' ', 'c'), + Seq[Byte]('o', 'w') + ) + } + } + + test("lines") { + test - prep { wd => + os.read.lines(wd / "File.txt") ==> Seq("I am cow") + os.read.lines(wd / "Multi Line.txt") ==> Seq( + "I am cow", + "Hear me moo", + "I weigh twice as much as you", + "And I look good on the barbecue" + ) + } + test("stream") { + test - prep { wd => + os.read.lines.stream(wd / "File.txt").count() ==> 1 + os.read.lines.stream(wd / "Multi Line.txt").count() ==> 4 + + // Streaming the lines to the console + for (line <- os.read.lines.stream(wd / "Multi Line.txt")) { + println(line) + } + } + } + } + } + + test("write") { + test - prep { wd => + os.write(wd / "New File.txt", "New File Contents") + os.read(wd / "New File.txt") ==> "New File Contents" + + os.write(wd / "NewBinary.bin", Array[Byte](0, 1, 2, 3)) + os.read.bytes(wd / "NewBinary.bin") ==> Array[Byte](0, 1, 2, 3) + } + test("append") { + test - prep { wd => + os.read(wd / "File.txt") ==> "I am cow" + + os.write.append(wd / "File.txt", ", hear me moo") + os.read(wd / "File.txt") ==> "I am cow, hear me moo" + + os.write.append(wd / "File.txt", ",\nI weigh twice as much as you") + os.read(wd / "File.txt") ==> + "I am cow, hear me moo,\nI weigh twice as much as you" + + os.read.bytes(wd / "misc/binary.png").length ==> 711 + os.write.append(wd / "misc/binary.png", Array[Byte](1, 2, 3)) + os.read.bytes(wd / "misc/binary.png").length ==> 714 + } + } + test("over") { + test - prep { wd => + os.read(wd / "File.txt") ==> "I am cow" + os.write.over(wd / 
"File.txt", "You are cow") + + os.read(wd / "File.txt") ==> "You are cow" + + os.write.over(wd / "File.txt", "We ", truncate = false) + os.read(wd / "File.txt") ==> "We are cow" + + os.write.over(wd / "File.txt", "s", offset = 8, truncate = false) + os.read(wd / "File.txt") ==> "We are sow" + } + } + test("outputStream") { + test - prep { wd => + val out = os.write.outputStream(wd / "New File.txt") + out.write('H') + out.write('e') + out.write('l') + out.write('l') + out.write('o') + out.close() + + os.read(wd / "New File.txt") ==> "Hello" + } + } + } + test("truncate") { + test - prep { wd => + os.read(wd / "File.txt") ==> "I am cow" + + os.truncate(wd / "File.txt", 4) + os.read(wd / "File.txt") ==> "I am" + } + } + } +} diff --git a/os/test/src/SegmentsFromStringTests.scala b/os/test/src/SegmentsFromStringTests.scala new file mode 100644 index 00000000..a3e72738 --- /dev/null +++ b/os/test/src/SegmentsFromStringTests.scala @@ -0,0 +1,39 @@ +package os + +import os.PathChunk.segmentsFromString +import utest.{assert => _, _} + +object SegmentsFromStringTests extends TestSuite { + + val tests = Tests { + test("segmentsFromString") { + def testSegmentsFromString(s: String, expected: List[String]) = { + assert(segmentsFromString(s).sameElements(expected)) + } + + testSegmentsFromString(" ", List(" ")) + + testSegmentsFromString("", List("")) + + testSegmentsFromString("""foo/bar/baz""", List("foo", "bar", "baz")) + + testSegmentsFromString("""/""", List("", "")) + testSegmentsFromString("""//""", List("", "", "")) + testSegmentsFromString("""///""", List("", "", "", "")) + + testSegmentsFromString("""a/""", List("a", "")) + testSegmentsFromString("""a//""", List("a", "", "")) + testSegmentsFromString("""a///""", List("a", "", "", "")) + + testSegmentsFromString("""ahs/""", List("ahs", "")) + testSegmentsFromString("""ahs//""", List("ahs", "", "")) + + testSegmentsFromString("""ahs/aa/""", List("ahs", "aa", "")) + testSegmentsFromString("""ahs/aa//""", List("ahs", "aa", "", "")) + + testSegmentsFromString("""/a""", List("", "a")) + testSegmentsFromString("""//a""", List("", "", "a")) + testSegmentsFromString("""//a/""", List("", "", "a", "")) + } + } +} diff --git a/os/test/src/SourceTests.scala b/os/test/src/SourceTests.scala new file mode 100644 index 00000000..54a0a4f4 --- /dev/null +++ b/os/test/src/SourceTests.scala @@ -0,0 +1,21 @@ +package test.os +import utest.{assert => _, _} + +object SourceTests extends TestSuite { + + val tests = Tests { + test("contentMetadata") - TestUtil.prep { wd => + // content type for all files is just treated as application/octet-stream, + // we do not do any clever mime-type inference or guessing + (wd / "folder1/one.txt").toSource.httpContentType ==> Some("application/octet-stream") + // length is taken from the filesystem at the moment at which `.toSource` is called + (wd / "folder1/one.txt").toSource.contentLength ==> Some(22) + (wd / "File.txt").toSource.contentLength ==> Some(8) + + // Make sure the `Writable` returned by `os.read.stream` propagates the content length + os.read.stream(wd / "folder1/one.txt").contentLength ==> Some(22) + // Even when converted to an `os.Source` + (os.read.stream(wd / "folder1/one.txt"): os.Source).contentLength ==> Some(22) + } + } +} diff --git a/os/test/src/SubprocessTests.scala b/os/test/src/SubprocessTests.scala new file mode 100644 index 00000000..5743304d --- /dev/null +++ b/os/test/src/SubprocessTests.scala @@ -0,0 +1,252 @@ +package test.os + +import java.io._ +import java.nio.charset.StandardCharsets + +import 
os._ +import utest._ + +import scala.collection.mutable + +object SubprocessTests extends TestSuite { + val scriptFolder = os.Path(sys.env("OS_TEST_RESOURCE_FOLDER")) / "test" + + val lsCmd = if (scala.util.Properties.isWin) "dir" else "ls" + + val tests = Tests { + test("lines") { + val res = TestUtil.proc(lsCmd, scriptFolder).call() + assert( + res.out.lines().exists(_.contains("File.txt")), + res.out.lines().exists(_.contains("folder1")), + res.out.lines().exists(_.contains("folder2")) + ) + } + test("string") { + val res = TestUtil.proc(lsCmd, scriptFolder).call() + assert( + res.out.text().contains("File.txt"), + res.out.text().contains("folder1"), + res.out.text().contains("folder2") + ) + } + test("bytes") { + if (Unix()) { + val res = proc(scriptFolder / "misc/echo", "abc").call() + val listed = res.out.bytes + listed ==> "abc\n".getBytes + } + } + test("chained") { + proc("git", "init").call() + os.write.over(os.pwd / "Readme.adoc", "hello") + assert( + proc("git", "init").call().out.text().contains("Reinitialized existing Git repository"), + proc("git", "init").call().out.text().contains("Reinitialized existing Git repository"), + TestUtil.proc(lsCmd, pwd).call().out.text().contains("Readme.adoc") + ) + } + test("basicList") { + val files = List("Readme.adoc", "build.sc") + os.write.over(os.pwd / "Readme.adoc", "hello") + os.write.over(os.pwd / "build.sc", "world") + val output = TestUtil.proc(lsCmd, files).call().out.text() + assert(files.forall(output.contains)) + } + test("listMixAndMatch") { + val stuff = List("I", "am", "bovine") + val result = TestUtil.proc("echo", "Hello,", stuff, "hear me roar").call() + if (Unix()) + assert(result.out.text().contains("Hello, " + stuff.mkString(" ") + " hear me roar")) + else // win quotes multiword args + assert(result.out.text().contains("Hello, " + stuff.mkString(" ") + " \"hear me roar\"")) + } + test("failures") { + val ex = intercept[os.SubprocessException] { + TestUtil.proc(lsCmd, "does-not-exist").call(check = true, stderr = os.Pipe) + } + val res: CommandResult = ex.result + assert( + res.exitCode != 0, + res.err.text().contains("No such file or directory") || // unix + res.err.text().contains("File Not Found") // win + ) + } + + test("filebased") { + if (Unix()) { + assert(proc(scriptFolder / "misc/echo", "HELLO").call().out.lines().mkString == "HELLO") + + val res: CommandResult = + proc(root / "bin/bash", "-c", "echo 'Hello'$ENV_ARG").call( + env = Map("ENV_ARG" -> "123") + ) + + assert(res.out.text().trim() == "Hello123") + } + } + test("filebased2") { + if (Unix()) { + val possiblePaths = Seq(root / "bin", root / "usr/bin").map { pfx => pfx / "echo" } + val res = proc("which", "echo").call() + val echoRoot = Path(res.out.text().trim()) + assert(possiblePaths.contains(echoRoot)) + + assert(proc(echoRoot, "HELLO").call().out.lines() == Seq("HELLO")) + } + } + + test("charSequence") { + val charSequence = new StringBuilder("This is a CharSequence") + val cmd = Seq( + "echo", + charSequence + ) + val res = proc(cmd).call() + assert(res.out.text().trim() == charSequence.toString()) + } + + test("envArgs") { + if (Unix()) { + locally { + val res0 = proc("bash", "-c", "echo \"Hello$ENV_ARG\"").call(env = Map("ENV_ARG" -> "12")) + assert(res0.out.lines() == Seq("Hello12")) + } + + locally { + val res1 = proc("bash", "-c", "echo \"Hello$ENV_ARG\"").call(env = Map("ENV_ARG" -> "12")) + assert(res1.out.lines() == Seq("Hello12")) + } + + locally { + val res2 = proc("bash", "-c", "echo 'Hello$ENV_ARG'").call(env = Map("ENV_ARG" -> "12")) 
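        // Single-quoting the argument ('Hello$ENV_ARG') stops bash from expanding the variable,
        // so the subprocess is expected to print the literal text rather than the env value.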
+ assert(res2.out.lines() == Seq("Hello$ENV_ARG")) + } + + locally { + val res3 = proc("bash", "-c", "echo 'Hello'$ENV_ARG").call(env = Map("ENV_ARG" -> "123")) + assert(res3.out.lines() == Seq("Hello123")) + } + + locally { + // TEST_SUBPROCESS_ENV env should be set in forkEnv in build.sc + assert(sys.env.get("TEST_SUBPROCESS_ENV") == Some("value")) + val res4 = proc("bash", "-c", "echo \"$TEST_SUBPROCESS_ENV\"").call( + env = Map.empty, + propagateEnv = false + ).out.lines() + assert(res4 == Seq("")) + } + + locally { + // TEST_SUBPROCESS_ENV env should be set in forkEnv in build.sc + assert(sys.env.get("TEST_SUBPROCESS_ENV") == Some("value")) + + val res5 = proc("bash", "-c", "echo \"$TEST_SUBPROCESS_ENV\"").call( + env = Map.empty, + propagateEnv = true + ).out.lines() + assert(res5 == Seq("value")) + } + } + } + test("envWithValue") { + if (Unix()) { + val variableName = "TEST_ENV_FOO" + val variableValue = "bar" + def envValue() = os.proc( + "bash", + "-c", + s"""if [ -z $${$variableName+x} ]; then echo "unset"; else echo "$$$variableName"; fi""" + ).call().out.lines().head + + val before = envValue() + assert(before == "unset") + + os.SubProcess.env.withValue(Map(variableName -> variableValue)) { + val res = envValue() + assert(res == variableValue) + } + + val after = envValue() + assert(after == "unset") + } + } + test("multiChunk") { + // Make sure that in the case where multiple chunks are being read from + // the subprocess in quick succession, we ensure that the output handler + // callbacks are properly ordered such that the output is aggregated + // correctly + test("bashC") { + if (TestUtil.isInstalled("python")) { + os.proc( + "python", + "-c", + """import sys, time + |for i in range(5): + | for j in range(10): + | sys.stdout.write(str(j)) + | # Make sure it comes as multiple chunks, but close together! 
+ | # Vary how close they are together to try and trigger race conditions + | time.sleep(0.00001 * i) + | sys.stdout.flush() + """.stripMargin + ).call().out.text() ==> + "01234567890123456789012345678901234567890123456789" + } + } + test("jarTf") { + // This was the original repro for the multi-chunk concurrency bugs + val jarFile = os.Path(sys.env("OS_TEST_RESOURCE_FOLDER")) / "misc/out.jar" + assert(TestUtil.eqIgnoreNewlineStyle( + os.proc("jar", "-tf", jarFile).call().out.text(), + """META-INF/MANIFEST.MF + |test/FooTwo.class + |test/Bar.class + |test/BarTwo.class + |test/Foo.class + |test/BarThree.class + |hello.txt + |""".stripMargin + )) + } + } + test("workingDirectory") { + val listed1 = TestUtil.proc(lsCmd).call(cwd = pwd) + val listed2 = TestUtil.proc(lsCmd).call(cwd = pwd / up) + + assert(listed2 != listed1) + } + test("customWorkingDir") { + val res1 = TestUtil.proc(lsCmd).call(cwd = pwd) // explicitly + // or implicitly + val res2 = TestUtil.proc(lsCmd).call() + } + + test("fileCustomWorkingDir") { + if (Unix()) { + val output = proc(scriptFolder / "misc/echo_with_wd", "HELLO").call(cwd = root / "usr") + assert(output.out.lines() == Seq("HELLO /usr")) + } + } + test("dynamicPwd") { + // Windows doesnt have bash installed so a bit inconvenient + // to run these subprocesses for testing + if (!scala.util.Properties.isWin) { + val outsidePwd = os.pwd + val tmp0 = os.temp.dir() + val tmp = os.followLink(tmp0).getOrElse(tmp0) + val x = proc("bash", "-c", "pwd").call() + val y = os.dynamicPwd.withValue(tmp) { + proc("bash", "-c", "pwd").call() + } + + val z = proc("bash", "-c", "pwd").call() + assert(outsidePwd.toString != tmp.toString) + assert(x.out.trim() == outsidePwd.toString) + assert(y.out.trim() == tmp.toString) + assert(z.out.trim() == outsidePwd.toString) + } + } + } +} diff --git a/os/test/src/TestUtil.scala b/os/test/src/TestUtil.scala new file mode 100644 index 00000000..fc82a7c6 --- /dev/null +++ b/os/test/src/TestUtil.scala @@ -0,0 +1,143 @@ +package test.os + +import utest.framework.TestPath + +import java.io.IOException +import java.nio.file._ +import java.nio.file.attribute.BasicFileAttributes + +object TestUtil { + + val NewLineRegex = "\r\n|\r|\n" + + def isInstalled(executable: String): Boolean = { + val getPathCmd = if (scala.util.Properties.isWin) "where" else "which" + os.proc(getPathCmd, executable).call(check = false).exitCode == 0 + } + + def isPython3(): Boolean = { + os.proc("python", "--version").call(check = false).out.text().startsWith("Python 3.") + } + + // run Unix command normally, Windows in CMD context + def proc(command: os.Shellable*) = { + if (scala.util.Properties.isWin) { + val cmd = ("CMD.EXE": os.Shellable) :: ("/C": os.Shellable) :: command.toList + os.proc(cmd: _*) + } else os.proc(command) + } + + // 1. when using Git "core.autocrlf true" + // some tests would fail when comparing with only \n + // 2. 
when using Git "core.autocrlf false" + // some tests would fail when comparing with process outputs which produce CRLF strings + /** Compares two strings, ignoring line-ending style */ + def eqIgnoreNewlineStyle(str1: String, str2: String) = { + val str1Normalized = str1.replaceAll(NewLineRegex, "\n").replaceAll("\n+", "\n") + val str2Normalized = str2.replaceAll(NewLineRegex, "\n").replaceAll("\n+", "\n") + str1Normalized == str2Normalized + } + + def prep[T](f: os.Path => T)(implicit tp: TestPath, fn: sourcecode.FullName) = { + val segments = Seq("out", "scratch") ++ fn.value.split('.').drop(2) ++ tp.value + + val directory = Paths.get(segments.mkString("/")) + if (!Files.exists(directory)) Files.createDirectories(directory.getParent) + else Files.walkFileTree( + directory, + new SimpleFileVisitor[Path]() { + override def visitFile(file: Path, attrs: BasicFileAttributes) = { + Files.delete(file) + FileVisitResult.CONTINUE + } + + override def postVisitDirectory(dir: Path, exc: IOException) = { + Files.delete(dir) + FileVisitResult.CONTINUE + } + } + ) + + val original = Paths.get(sys.env("OS_TEST_RESOURCE_FOLDER"), "test") + Files.walkFileTree( + original, + new SimpleFileVisitor[Path]() { + override def preVisitDirectory(dir: Path, attrs: BasicFileAttributes) = { + Files.copy(dir, directory.resolve(original.relativize(dir)), LinkOption.NOFOLLOW_LINKS) + FileVisitResult.CONTINUE + } + + override def visitFile(file: Path, attrs: BasicFileAttributes) = { + Files.copy(file, directory.resolve(original.relativize(file)), LinkOption.NOFOLLOW_LINKS) + FileVisitResult.CONTINUE + } + } + ) + + f(os.Path(directory.toAbsolutePath)) + } + + def prepChecker[T](f: os.Path => T)(implicit tp: TestPath, fn: sourcecode.FullName): T = + prep(wd => os.checker.withValue(AccessChecker(wd))(f(wd))) + + object AccessChecker { + def apply(roots: os.Path*): AccessChecker = AccessChecker(roots, roots) + } + + case class AccessChecker(readRoots: Seq[os.Path], writeRoots: Seq[os.Path]) extends os.Checker { + + def onRead(path: os.ReadablePath): Unit = { + path match { + case path: os.Path => + if (!readRoots.exists(path.startsWith)) throw ReadDenied(path, readRoots) + case _ => + } + } + + def onWrite(path: os.Path): Unit = { + // skip check when not writing to filesystem (like when writing to a zip file) + if (path.wrapped.getFileSystem.provider().getScheme == "file") { + if (!writeRoots.exists(path.startsWith)) throw WriteDenied(path, writeRoots) + } + } + } + + object Unchecked { + + def apply[T](thunk: => T): T = + os.checker.withValue(os.Checker.Nop)(thunk) + + def scope[T](acquire: => Unit, release: => Unit)(thunk: => T): T = { + apply(acquire) + try thunk + finally apply(release) + } + } + + case class ReadDenied(requested: os.Path, allowed: Seq[os.Path]) + extends Exception( + s"Cannot read from $requested. Read is ${ + if (allowed.isEmpty) "not permitted" + else s"restricted to ${allowed.mkString(", ")}" + }." + ) + + case class WriteDenied(requested: os.Path, allowed: Seq[os.Path]) + extends Exception( + s"Cannot write to $requested. Write is ${ + if (allowed.isEmpty) "not permitted" + else s"restricted to ${allowed.mkString(", ")}" + }." 
+ ) + + lazy val isDotty = { + val cl: ClassLoader = Thread.currentThread().getContextClassLoader + try { + cl.loadClass("scala.runtime.Scala3RunTime") + true + } catch { + case _: ClassNotFoundException => + false + } + } +} diff --git a/os/test/src/ZipOpTests.scala b/os/test/src/ZipOpTests.scala new file mode 100644 index 00000000..d0a4c07e --- /dev/null +++ b/os/test/src/ZipOpTests.scala @@ -0,0 +1,237 @@ +package test.os + +import os.zip +import test.os.TestUtil.prep +import utest._ + +import java.io.{ByteArrayInputStream, ByteArrayOutputStream, PrintStream} +import java.util.zip.{ZipEntry, ZipOutputStream} + +object ZipOpTests extends TestSuite { + + def tests = Tests { + // This test seems really flaky for some reason + // test("level") - prep { wd => + // val zipsForLevel = for (i <- Range.inclusive(0, 9)) yield { + // os.write.over(wd / "File.txt", Range(0, 1000).map(x => x.toString * x)) + // os.zip( + // dest = wd / s"archive-$i.zip", + // sources = Seq( + // wd / "File.txt", + // wd / "folder1" + // ), + // compressionLevel = i + // ) + // } + + // // We can't compare every level because compression isn't fully monotonic, + // // but we compare some arbitrary levels just to sanity check things + + // // Uncompressed zip is definitely bigger than first level of compression + // assert(os.size(zipsForLevel(0)) > os.size(zipsForLevel(1))) + // // First level of compression is bigger than middle compression + // assert(os.size(zipsForLevel(1)) > os.size(zipsForLevel(5))) + // // Middle compression is bigger than best compression + // assert(os.size(zipsForLevel(5)) > os.size(zipsForLevel(9))) + // } + test("renaming") - prep { wd => + val zipFileName = "zip-file-test.zip" + val zipFile1: os.Path = os.zip( + dest = wd / zipFileName, + sources = Seq( + // renaming files and folders + wd / "File.txt" -> os.sub / "renamed-file.txt", + wd / "folder1" -> os.sub / "renamed-folder" + ) + ) + + val unzippedFolder = os.unzip( + source = zipFile1, + dest = wd / "unzipped folder" + ) + + val paths = os.walk(unzippedFolder) + val expected = Seq( + wd / "unzipped folder/renamed-file.txt", + wd / "unzipped folder/renamed-folder", + wd / "unzipped folder/renamed-folder/one.txt" + ) + assert(paths.sorted == expected) + } + + test("excludePatterns") - prep { wd => + val amxFile = "File.amx" + os.copy(wd / "File.txt", wd / amxFile) + + // Zipping files and folders in a new zip file + val zipFileName = "zipByExcludingCertainFiles.zip" + val zipFile1: os.Path = os.zip( + dest = wd / zipFileName, + sources = Seq( + wd / "File.txt", + wd / amxFile, + wd / "Multi Line.txt" + ), + excludePatterns = Seq(".*\\.txt".r) + ) + + // Unzip file to check for contents + val outputZipFilePath = os.unzip( + zipFile1, + dest = wd / "zipByExcludingCertainFiles" + ) + val paths = os.walk(outputZipFilePath).sorted + val expected = Seq(wd / "zipByExcludingCertainFiles/File.amx") + assert(paths == expected) + } + + test("includePatterns") - prep { wd => + val amxFile = "File.amx" + os.copy(wd / "File.txt", wd / amxFile) + + // Zipping files and folders in a new zip file + val zipFileName = "zipByIncludingCertainFiles.zip" + val zipFile1: os.Path = os.zip( + dest = wd / zipFileName, + sources = Seq( + wd / "File.txt", + wd / amxFile, + wd / "Multi Line.txt" + ), + includePatterns = Seq(".*\\.amx".r) + ) + + // Unzip file to check for contents + val outputZipFilePath = + os.unzip(zipFile1, dest = wd / "zipByIncludingCertainFiles") + val paths = os.walk(outputZipFilePath) + val expected = Seq(wd / "zipByIncludingCertainFiles" 
/ amxFile) + assert(paths == expected) + } + + test("zipStream") - prep { wd => + val zipFileName = "zipStreamFunction.zip" + + val stream = os.write.outputStream(wd / "zipStreamFunction.zip") + + val writable = zip.stream(sources = Seq(wd / "File.txt")) + + writable.writeBytesTo(stream) + stream.close() + + val unzippedFolder = os.unzip( + source = wd / zipFileName, + dest = wd / "zipStreamFunction" + ) + + val paths = os.walk(unzippedFolder) + assert(paths == Seq(unzippedFolder / "File.txt")) + } + + test("list") - prep { wd => + // Zipping files and folders in a new zip file + val zipFileName = "listContentsOfZipFileWithoutExtracting.zip" + val zipFile: os.Path = os.zip( + dest = wd / zipFileName, + sources = Seq( + wd / "File.txt", + wd / "folder1" + ) + ) + + // Unzip file to a destination folder + val listedContents = os.unzip.list(source = wd / zipFileName).toSeq + + val expected = Seq(os.sub / "File.txt", os.sub / "one.txt") + assert(listedContents == expected) + } + + test("unzipExcludePatterns") - prep { wd => + val amxFile = "File.amx" + os.copy(wd / "File.txt", wd / amxFile) + + val zipFileName = "unzipAllExceptExcludingCertainFiles.zip" + val zipFile: os.Path = os.zip( + dest = wd / zipFileName, + sources = Seq( + wd / "File.txt", + wd / amxFile, + wd / "folder1" + ) + ) + + // Unzip file to a destination folder + val unzippedFolder = os.unzip( + source = wd / zipFileName, + dest = wd / "unzipAllExceptExcludingCertainFiles", + excludePatterns = Seq(amxFile.r) + ) + + val paths = os.walk(unzippedFolder) + val expected = Seq( + wd / "unzipAllExceptExcludingCertainFiles/File.txt", + wd / "unzipAllExceptExcludingCertainFiles/one.txt" + ) + + assert(paths.toSet == expected.toSet) + } + + test("zipList") - prep { wd => + val sources = wd / "folder1" + val zipFilePath = os.zip( + dest = wd / "my.zip", + sources = os.list(sources) + ) + + val expected = os.unzip.list(source = zipFilePath).map(_.resolveFrom(sources)).toSet + assert(os.list(sources).toSet == expected) + } + + test("unzipStream") - prep { wd => + // Step 1: Create an in-memory ZIP file as a stream + val zipStreamOutput = new ByteArrayOutputStream() + val zipOutputStream = new ZipOutputStream(zipStreamOutput) + + // Step 2: Add some files to the ZIP + val file1Name = "file1.txt" + val file2Name = "nested/folder/file2.txt" + + // Add first file + zipOutputStream.putNextEntry(new ZipEntry(file1Name)) + zipOutputStream.write("Content of file1".getBytes) + zipOutputStream.closeEntry() + + // Add second file inside a nested folder + zipOutputStream.putNextEntry(new ZipEntry(file2Name)) + zipOutputStream.write("Content of file2".getBytes) + zipOutputStream.closeEntry() + + // Close the ZIP output stream + zipOutputStream.close() + + // Step 3: Prepare the destination folder for unzipping + val unzippedFolder = wd / "unzipped-stream-folder" + val readableZipStream: java.io.InputStream = + new ByteArrayInputStream(zipStreamOutput.toByteArray) + + // Unzipping the stream to the destination folder + os.unzip.stream( + source = readableZipStream, + dest = unzippedFolder + ) + + // Step 5: Verify the unzipped files and contents + val paths = os.walk(unzippedFolder) + assert(paths.contains(unzippedFolder / file1Name)) + assert(paths.contains(unzippedFolder / "nested" / "folder" / "file2.txt")) + + // Check the contents of the files + val file1Content = os.read(unzippedFolder / file1Name) + val file2Content = os.read(unzippedFolder / "nested" / "folder" / "file2.txt") + + assert(file1Content == "Content of file1") + assert(file2Content 
== "Content of file2") + } + + } +} diff --git a/os/test/testJarExit/src/TestJarExit.java b/os/test/testJarExit/src/TestJarExit.java new file mode 100644 index 00000000..01a98918 --- /dev/null +++ b/os/test/testJarExit/src/TestJarExit.java @@ -0,0 +1,11 @@ +import java.util.Scanner; + +public class TestJarExit { + public static void main(String[] args) throws InterruptedException { + int exitCode = Integer.parseInt(args[0]); + int exitSleep = Integer.parseInt(args[1]); + System.err.println("Exiting with code: " + exitCode); + Thread.sleep(exitSleep); + System.exit(exitCode); + } +} diff --git a/os/test/testJarReader/src/TestJarReader.java b/os/test/testJarReader/src/TestJarReader.java new file mode 100644 index 00000000..66c7af00 --- /dev/null +++ b/os/test/testJarReader/src/TestJarReader.java @@ -0,0 +1,24 @@ +import java.util.Scanner; + +public class TestJarReader { + public static void main(String[] args) throws InterruptedException { + Scanner scanner = new Scanner(System.in); + int readN = Integer.parseInt(args[0]); + int readSleep = Integer.parseInt(args[1]); + boolean debugOutput = Boolean.parseBoolean(args[2]); + int i = 0; + while(readN == -1 || i < readN) { + if(debugOutput) { + System.err.println("At: " + i); + } + String read = scanner.nextLine(); + System.out.println("Read: " + read); + Thread.sleep(readSleep); + i++; + } + scanner.close(); + if(debugOutput) { + System.err.println("Exiting reader"); + } + } +} diff --git a/os/test/testJarWriter/src/TestJarWriter.java b/os/test/testJarWriter/src/TestJarWriter.java new file mode 100644 index 00000000..05390932 --- /dev/null +++ b/os/test/testJarWriter/src/TestJarWriter.java @@ -0,0 +1,24 @@ +import java.util.Scanner; +import java.lang.InterruptedException; + +public class TestJarWriter { + public static void main(String[] args) throws InterruptedException { + Scanner scanner = new Scanner(System.in); + int writeN = Integer.parseInt(args[0]); + int writeSleep = Integer.parseInt(args[1]); + boolean debugOutput = Boolean.parseBoolean(args[2]); + int i = 0; + while(writeN == -1 || i < writeN) { + System.out.println("Hello " + i); + if(debugOutput) { + System.err.println("Written " + i); + } + Thread.sleep(writeSleep); + i++; + } + scanner.close(); + if(debugOutput) { + System.err.println("Exiting writer"); + } + } +} diff --git a/os/test/testSpawnExitHook/src/TestSpawnExitHook.scala b/os/test/testSpawnExitHook/src/TestSpawnExitHook.scala new file mode 100644 index 00000000..026fd60f --- /dev/null +++ b/os/test/testSpawnExitHook/src/TestSpawnExitHook.scala @@ -0,0 +1,15 @@ +package test.os + +object TestSpawnExitHook { + def main(args: Array[String]): Unit = { + Runtime.getRuntime.addShutdownHook( + new Thread(() => { + for (shutdownDelay <- args.lift(1)) Thread.sleep(shutdownDelay.toLong) + System.err.println("Shutdown Hook") + }) + ) + val cmd = (sys.env("TEST_SPAWN_EXIT_HOOK_ASSEMBLY2"), args(0)) + os.spawn(cmd = cmd, destroyOnExit = true) + Thread.sleep(99999) + } +} diff --git a/os/test/testSpawnExitHook2/src/TestSpawnExitHook2.java b/os/test/testSpawnExitHook2/src/TestSpawnExitHook2.java new file mode 100644 index 00000000..d9d5195f --- /dev/null +++ b/os/test/testSpawnExitHook2/src/TestSpawnExitHook2.java @@ -0,0 +1,12 @@ +package test.os; +import java.nio.file.StandardOpenOption; + +public class TestSpawnExitHook2{ + public static void main(String[] args) throws Exception{ + java.nio.channels.FileChannel.open( + java.nio.file.Paths.get(args[0]), + java.util.EnumSet.of(StandardOpenOption.READ, StandardOpenOption.WRITE) + 
).lock(); + Thread.sleep(1337000); + } +} diff --git a/os/watch/test/src/WatchTests.scala b/os/watch/test/src/WatchTests.scala index 33bf97b8..acbcef9b 100644 --- a/os/watch/test/src/WatchTests.scala +++ b/os/watch/test/src/WatchTests.scala @@ -75,7 +75,7 @@ object WatchTests extends TestSuite with TestSuite.Retries { Set(os.sub / "my-new-folder") ) - checkFileManglingChanges(wd / "my-new-folder" / "test") + checkFileManglingChanges(wd / "my-new-folder/test") locally { val expectedChanges = if (isWin) Set( @@ -85,10 +85,10 @@ object WatchTests extends TestSuite with TestSuite.Retries { else Set( os.sub / "folder2", os.sub / "folder3", - os.sub / "folder3" / "nestedA", - os.sub / "folder3" / "nestedA" / "a.txt", - os.sub / "folder3" / "nestedB", - os.sub / "folder3" / "nestedB" / "b.txt" + os.sub / "folder3/nestedA", + os.sub / "folder3/nestedA/a.txt", + os.sub / "folder3/nestedB", + os.sub / "folder3/nestedB/b.txt" ) checkChanges( os.move(wd / "folder2", wd / "folder3"), @@ -100,10 +100,10 @@ object WatchTests extends TestSuite with TestSuite.Retries { os.copy(wd / "folder3", wd / "folder4"), Set( os.sub / "folder4", - os.sub / "folder4" / "nestedA", - os.sub / "folder4" / "nestedA" / "a.txt", - os.sub / "folder4" / "nestedB", - os.sub / "folder4" / "nestedB" / "b.txt" + os.sub / "folder4/nestedA", + os.sub / "folder4/nestedA/a.txt", + os.sub / "folder4/nestedB", + os.sub / "folder4/nestedB/b.txt" ) ) @@ -111,15 +111,15 @@ object WatchTests extends TestSuite with TestSuite.Retries { os.remove.all(wd / "folder4"), Set( os.sub / "folder4", - os.sub / "folder4" / "nestedA", - os.sub / "folder4" / "nestedA" / "a.txt", - os.sub / "folder4" / "nestedB", - os.sub / "folder4" / "nestedB" / "b.txt" + os.sub / "folder4/nestedA", + os.sub / "folder4/nestedA/a.txt", + os.sub / "folder4/nestedB", + os.sub / "folder4/nestedB/b.txt" ) ) - checkFileManglingChanges(wd / "folder3" / "nestedA" / "double-nested-file") - checkFileManglingChanges(wd / "folder3" / "nestedB" / "double-nested-file") + checkFileManglingChanges(wd / "folder3/nestedA/double-nested-file") + checkFileManglingChanges(wd / "folder3/nestedB/double-nested-file") checkChanges( os.symlink(wd / "newlink", wd / "doesntexist"), @@ -132,13 +132,13 @@ object WatchTests extends TestSuite with TestSuite.Retries { ) checkChanges( - os.hardlink(wd / "newlink3", wd / "folder3" / "nestedA" / "a.txt"), + os.hardlink(wd / "newlink3", wd / "folder3/nestedA/a.txt"), System.getProperty("os.name") match { case "Mac OS X" => Set( os.sub / "newlink3", - os.sub / "folder3" / "nestedA", - os.sub / "folder3" / "nestedA" / "a.txt" + os.sub / "folder3/nestedA", + os.sub / "folder3/nestedA/a.txt" ) case _ => Set(os.sub / "newlink3") } From 0d7084e2a5795a86d15820b3aee5d4726f402da1 Mon Sep 17 00:00:00 2001 From: Li Haoyi Date: Tue, 18 Mar 2025 17:02:33 +0800 Subject: [PATCH 2/2] cleanup --- os/src/FileOps.scala | 12 +++----- os/src/ProcessOps.scala | 66 +++-------------------------------------- 2 files changed, 8 insertions(+), 70 deletions(-) diff --git a/os/src/FileOps.scala b/os/src/FileOps.scala index 2805a53a..676687f2 100644 --- a/os/src/FileOps.scala +++ b/os/src/FileOps.scala @@ -41,8 +41,7 @@ object makeDir extends Function1[Path, Unit] { * destination path already containts a directory */ object all extends Function1[Path, Unit] { - def apply(path: Path): Unit = apply(path, null, true) - def apply(path: Path, perms: PermSet = null, acceptLinkedDirectory: Boolean = true): Unit = { + def apply(path: Path, @unroll perms: PermSet = null, @unroll 
acceptLinkedDirectory: Boolean = true): Unit = { checker.value.onWrite(path) // We special case calling makeDir.all on a symlink to a directory; // normally createDirectories blows up noisily, when really what most @@ -272,8 +271,7 @@ object copy { * does nothing if there aren't any */ object remove extends Function1[Path, Boolean] { - def apply(target: Path): Boolean = apply(target, false) - def apply(target: Path, checkExists: Boolean = false): Boolean = { + def apply(target: Path, @unroll checkExists: Boolean = false): Boolean = { checker.value.onWrite(target) if (checkExists) { Files.delete(target.wrapped) @@ -284,8 +282,7 @@ object remove extends Function1[Path, Boolean] { } object all extends Function1[Path, Unit] { - def apply(target: Path): Unit = apply(target, ignoreErrors = false) - def apply(target: Path, ignoreErrors: Boolean = false): Unit = { + def apply(target: Path, @unroll ignoreErrors: Boolean = false): Unit = { require(target.segmentCount != 0, s"Cannot remove a root directory: $target") checker.value.onWrite(target) @@ -308,8 +305,7 @@ object remove extends Function1[Path, Boolean] { * Checks if a file or folder exists at the given path. */ object exists extends Function1[Path, Boolean] { - def apply(p: Path): Boolean = Files.exists(p.wrapped) - def apply(p: Path, followLinks: Boolean = true): Boolean = { + def apply(p: Path, @unroll followLinks: Boolean = true): Boolean = { val opts = if (followLinks) Array[LinkOption]() else Array(LinkOption.NOFOLLOW_LINKS) Files.exists(p.wrapped, opts: _*) } diff --git a/os/src/ProcessOps.scala b/os/src/ProcessOps.scala index e4a30629..0bf3613f 100644 --- a/os/src/ProcessOps.scala +++ b/os/src/ProcessOps.scala @@ -6,7 +6,7 @@ import os.SubProcess.InputStream import java.io.IOException import java.util.concurrent.LinkedBlockingQueue import ProcessOps._ - +import scala.annotation.unroll object call { /** @@ -26,7 +26,7 @@ object call { check: Boolean = true, propagateEnv: Boolean = true, shutdownGracePeriod: Long = 100, - destroyOnExit: Boolean = true + @unroll destroyOnExit: Boolean = true ): CommandResult = { os.proc(cmd).call( cwd = cwd, @@ -43,37 +43,6 @@ object call { ) } - // Bincompat Forwarder - def apply( - cmd: Shellable, - env: Map[String, String], - // Make sure `cwd` only comes after `env`, so `os.call("foo", path)` is a compile error - // since the correct syntax is `os.call(("foo", path))` - cwd: Path, - stdin: ProcessInput, - stdout: ProcessOutput, - stderr: ProcessOutput, - mergeErrIntoOut: Boolean, - timeout: Long, - check: Boolean, - propagateEnv: Boolean, - timeoutGracePeriod: Long - ): CommandResult = { - call( - cmd = cmd, - cwd = cwd, - env = env, - stdin = stdin, - stdout = stdout, - stderr = stderr, - mergeErrIntoOut = mergeErrIntoOut, - timeout = timeout, - check = check, - propagateEnv = propagateEnv, - shutdownGracePeriod = timeoutGracePeriod, - destroyOnExit = true - ) - } } object spawn { @@ -91,8 +60,8 @@ object spawn { stderr: ProcessOutput = os.Inherit, mergeErrIntoOut: Boolean = false, propagateEnv: Boolean = true, - shutdownGracePeriod: Long = 100, - destroyOnExit: Boolean = true + @unroll shutdownGracePeriod: Long = 100, + @unroll destroyOnExit: Boolean = true ): SubProcess = { os.proc(cmd).spawn( cwd = cwd, @@ -106,33 +75,6 @@ object spawn { destroyOnExit = destroyOnExit ) } - - // Bincompat Forwarder - def apply( - cmd: Shellable, - // Make sure `cwd` only comes after `env`, so `os.spawn("foo", path)` is a compile error - // since the correct syntax is `os.spawn(("foo", path))` - env: Map[String, 
String], - cwd: Path, - stdin: ProcessInput, - stdout: ProcessOutput, - stderr: ProcessOutput, - mergeErrIntoOut: Boolean, - propagateEnv: Boolean - ): SubProcess = { - spawn( - cmd = cmd, - cwd = cwd, - env = env, - stdin = stdin, - stdout = stdout, - stderr = stderr, - mergeErrIntoOut = mergeErrIntoOut, - propagateEnv = propagateEnv, - shutdownGracePeriod = 100, - destroyOnExit = true - ) - } } /**