Setup CI testing and building #1

@@ -0,0 +1,44 @@
name: Publish Workflow
on:
  push:
    tags:
      - v*
jobs:
  publish:
    name: Publish library
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4

      - name: Set up Java
        uses: actions/setup-java@v3
        with:
          distribution: adopt
          java-version: 17

      - name: Verify Gradle wrapper
        uses: gradle/wrapper-validation-action@v1

      - name: Setup Gradle
        uses: gradle/gradle-build-action@v2

      - name: Run checks
        run: ./gradlew check

      - name: Parse parameters
        id: parse
        run: |
          export VERSION="$(echo ${{ github.ref_name }} | cut -c2-)"
          echo "Parsed version: '$VERSION'"
          echo "backup_version=$VERSION" >> "$GITHUB_OUTPUT"

      - name: Build
        env:
          BACKUP_VERSION: ${{ steps.parse.outputs.backup_version }}
        run: ./gradlew shadowJar

      - name: Upload artifacts
        uses: actions/upload-artifact@v3
        with:
          name: ShadowJAR
          path: build/libs/*-all.jar
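The `Parse parameters` step turns the pushed tag into a bare version string: `github.ref_name` holds the tag (for example `v1.2.3`), `cut -c2-` drops the leading character, and the result is exposed as a step output that the `Build` step forwards to Gradle as `BACKUP_VERSION`. A minimal Kotlin sketch of the same transformation, for illustration only (the workflow does this in shell):

```kotlin
// Equivalent of `cut -c2-`: drop the first character of the ref name,
// so the tag "v1.2.3" yields "1.2.3". Assumes every release tag carries a
// one-character "v" prefix, as the `v*` trigger above implies.
fun versionFromTag(refName: String): String = refName.drop(1)

fun main() {
    check(versionFromTag("v1.2.3") == "1.2.3")
}
```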
@@ -0,0 +1,29 @@
name: Test Workflow
on:
  pull_request:
    branches:
      - main
  push:
    branches:
      - main
jobs:
  lint-and-test:
    name: Lint and test library
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4

      - name: Set up Java
        uses: actions/setup-java@v3
        with:
          distribution: adopt
          java-version: 17

      - name: Verify Gradle wrapper
        uses: gradle/wrapper-validation-action@v1

      - name: Setup Gradle
        uses: gradle/gradle-build-action@v2

      - name: Run checks
        run: ./gradlew check
@@ -2,6 +2,10 @@
This is a small backup utility for uploading/restoring a local directory to/from
an AWS S3 bucket.

## Usage
This tool is released as a JAR in the [release page](https://git.koval.net/cyclane/teamcity-executors-test-task/releases).
Use `java -jar <backup-jar-name>.jar --help` for more detailed usage instructions.

## Assumptions
1. This test task is not interested in re-implementations of common libraries (AWS SDK, Clikt, Gradle Shadow, ...)
2. The last part (restoration of a single file) should be optimised so that only the part of the blob required for this
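Assumption 2 appears to be what motivates the ranged S3 reads in `BackupClient.restoreFile` further down: rather than downloading the whole backup, only the ZIP end-of-central-directory metadata and the requested file's bytes are fetched. A hedged sketch of such a suffix-range read with the AWS SDK for Kotlin (bucket and key are placeholders, not code from this PR):

```kotlin
import aws.sdk.kotlin.services.s3.S3Client
import aws.sdk.kotlin.services.s3.model.GetObjectRequest
import aws.smithy.kotlin.runtime.content.toByteArray

// Fetch only the last 22 bytes of the backup object -- the fixed-size ZIP
// end-of-central-directory record -- instead of the whole blob.
suspend fun fetchEocd(s3: S3Client, bucket: String, backupKey: String): ByteArray {
    val req = GetObjectRequest {
        this.bucket = bucket
        key = backupKey
        range = "bytes=-22" // suffix range: final 22 bytes of the object
    }
    return s3.getObject(req) { resp -> resp.body?.toByteArray() ?: ByteArray(0) }
}
```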
@@ -1,10 +1,12 @@
import com.github.jengelman.gradle.plugins.shadow.tasks.ShadowJar

plugins {
    kotlin("jvm") version "1.9.21"
    id("com.github.johnrengelman.shadow") version "8.1.1"
}

group = "net.koval"
version = "1.0-SNAPSHOT"
group = "net.koval.teamcity-executors-test-task"
version = System.getenv("BACKUP_VERSION")

repositories {
    mavenCentral()

@@ -12,6 +14,7 @@ repositories {

dependencies {
    implementation("aws.sdk.kotlin:s3:1.0.25")
    implementation("org.slf4j:slf4j-simple:2.0.9")
    implementation("org.jetbrains.kotlinx:kotlinx-coroutines-core:1.7.3")
    testImplementation("org.jetbrains.kotlin:kotlin-test")
}
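The build script now takes its version from the `BACKUP_VERSION` variable that the publish workflow exports. Outside CI `System.getenv("BACKUP_VERSION")` returns null, so local builds have no usable version; if a local fallback is wanted, it could look like this (an assumption, not part of this diff):

```kotlin
// build.gradle.kts sketch: use the CI-provided version when present and a
// throwaway local version otherwise (e.g. for a plain ./gradlew check).
version = System.getenv("BACKUP_VERSION") ?: "0.0.0-local"
```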
@@ -24,11 +24,18 @@ import java.util.zip.ZipInputStream
import java.util.zip.ZipOutputStream
import kotlin.io.path.createDirectory

/**
 * AWS S3 backup client.
 */
class BackupClient(
    private val s3: S3Client,
    private val bucketName: String,
    private val bufSize: Int = 1024 * 1024 * 100
) {
    /**
     * Upload a file/directory backup to AWS S3.
     * @param file The File object for the file or directory.
     */
    suspend fun upload(file: File) = coroutineScope {
        val backupKey = "${file.name}/${Instant.now()}.zip"
        PipedInputStream().use { inputStream ->

@@ -58,7 +65,7 @@ class BackupClient(
                    partNumber = number
                    uploadId = upload.uploadId
                    body = ByteStream.fromBytes(data.take(bytesRead))
                }.asCompletedPart(number)
                }.toCompletedPart(number)
                uploadParts.add(part)
                number++
                bytesRead = inputStream.readNBytes(data, 0, bufSize)
@@ -92,6 +99,11 @@ class BackupClient(
        backupKey
    }

    /**
     * Restore a backup from AWS S3.
     * @param destination The destination directory path for the backup contents.
     * @param backupKey The S3 key of the backup.
     */
    suspend fun restore(destination: Path, backupKey: String) = coroutineScope {
        val req = GetObjectRequest {
            bucket = bucketName
@@ -107,6 +119,12 @@ class BackupClient(
        }
    }

    /**
     * Restore a single file from a backup from AWS S3.
     * @param destination The destination directory path for the file from the backup.
     * @param backupKey The S3 key of the backup.
     * @param fileName The full name of the file to restore (including directories if it was under a subdirectory).
     */
    suspend fun restoreFile(destination: Path, backupKey: String, fileName: String) = coroutineScope {
        // For byte ranges refer to https://pkware.cachefly.net/webdocs/APPNOTE/APPNOTE-6.3.9.TXT
        val eocdReq = GetObjectRequest {
@@ -187,7 +205,12 @@ class BackupClient(
    }
}

private fun UploadPartResponse.asCompletedPart(number: Int): CompletedPart {
/**
 * Convert an UploadPartResponse to a CompletedPart.
 * @param number The part number that was used for this part upload.
 * @return The CompletedPart object.
 */
private fun UploadPartResponse.toCompletedPart(number: Int): CompletedPart {
    val part = this
    return CompletedPart {
        partNumber = number
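The renamed `toCompletedPart` helper captures each part's details from the `UploadPartResponse`; S3 needs the resulting `CompletedPart` list to finish the multipart upload. A hedged sketch of that final call, using standard AWS SDK for Kotlin types rather than code copied from this PR:

```kotlin
import aws.sdk.kotlin.services.s3.S3Client
import aws.sdk.kotlin.services.s3.model.CompleteMultipartUploadRequest
import aws.sdk.kotlin.services.s3.model.CompletedMultipartUpload
import aws.sdk.kotlin.services.s3.model.CompletedPart

// Finish a multipart upload once every uploaded part has been converted
// to a CompletedPart (part number plus ETag).
suspend fun completeUpload(
    s3: S3Client,
    bucket: String,
    key: String,
    uploadId: String,
    parts: List<CompletedPart>,
) {
    s3.completeMultipartUpload(CompleteMultipartUploadRequest {
        this.bucket = bucket
        this.key = key
        this.uploadId = uploadId
        multipartUpload = CompletedMultipartUpload { this.parts = parts }
    })
}
```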
@@ -199,10 +222,19 @@ private fun UploadPartResponse.asCompletedPart(number: Int): CompletedPart {
    }
}

/**
 * Take first `n` items from the beginning of a ByteArray.
 * @param n The number of items to take.
 * @return A ByteArray of the first `n` items.
 */
private fun ByteArray.take(n: Int) =
    if (n == size) this // No copy
    else asList().subList(0, n).toByteArray() // TODO: One copy (toByteArray()), not sure how to do 0 copies here

/**
 * Compress a file or directory as a ZIP file to an `OutputStream`.
 * @param outputStream The `OutputStream` to write the ZIP file contents to.
 */
private fun File.compressToZip(outputStream: OutputStream) = ZipOutputStream(outputStream).use { zipStream ->
    val parentDir = this.absoluteFile.parent + "/"
    val fileQueue = ArrayDeque<File>()
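On the `TODO` in `take(n)`: avoiding the copy entirely would mean passing `(buffer, length)` pairs to the callers instead of a trimmed `ByteArray`, but the list detour can at least be dropped, since `copyOfRange` produces the same single copy directly. A sketch of that variant (not what this PR ships):

```kotlin
// Same semantics as the PR's take(n): return the receiver untouched when
// n == size (no copy), otherwise one copy of the first n bytes.
private fun ByteArray.take(n: Int): ByteArray =
    if (n == size) this else copyOfRange(0, n)
```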
@@ -226,6 +258,11 @@ private fun File.compressToZip(outputStream: OutputStream) = ZipOutputStream(out
    }
}

/**
 * Decompress `ZipInputStream` contents to specified destination paths.
 * @param bufSize The buffer size to use for writing the decompressed files.
 * @param entryNameToPath A function to convert ZIP entry names to destination `Path`s.
 */
private fun ZipInputStream.decompress(
    bufSize: Int = 1024 * 1024,
    entryNameToPath: (String) -> Path
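`decompress` leaves the entry-name-to-`Path` mapping to the caller, which is presumably how both `restore` and `restoreFile` reuse it with different destinations. A self-contained sketch of that general shape using plain `java.util.zip` (simplified; no attribute or buffer-size handling, and not the PR's implementation):

```kotlin
import java.io.InputStream
import java.nio.file.Files
import java.nio.file.Path
import java.util.zip.ZipInputStream

// Walk the ZIP entries and let the caller decide where each one lands.
fun unzip(input: InputStream, entryNameToPath: (String) -> Path) {
    ZipInputStream(input).use { zip ->
        generateSequence { zip.nextEntry }.forEach { entry ->
            val target = entryNameToPath(entry.name)
            if (entry.isDirectory) {
                Files.createDirectories(target)
            } else {
                target.parent?.let { Files.createDirectories(it) }
                Files.newOutputStream(target).use { out -> zip.copyTo(out) }
            }
        }
    }
}
```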
@@ -250,6 +287,11 @@ private fun ZipInputStream.decompress(
    }
}

/**
 * Set a `ZipEntry`'s attributes given a file's path.
 * @param entry The `ZipEntry` to set attributes of.
 * @param path The `Path` of the file to get the attributes from.
 */
private fun setZipAttributes(entry: ZipEntry, path: Path) {
    try {
        val attrs = Files.getFileAttributeView(path, BasicFileAttributeView::class.java).readAttributes()
@@ -260,6 +302,11 @@ private fun setZipAttributes(entry: ZipEntry, path: Path) {
    }
}

/**
 * Set a file's attributes given a `ZipEntry`.
 * @param entry The `ZipEntry` to get the attributes from.
 * @param path The `Path` of the file to set the attributes of.
 */
private fun applyZipAttributes(entry: ZipEntry, path: Path) {
    try {
        val attrs = Files.getFileAttributeView(path, BasicFileAttributeView::class.java)
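Both helpers go through `BasicFileAttributeView`, whose timestamps map directly onto the creation/access/modification times a `ZipEntry` has carried since Java 8. The bodies are cut off by the hunks above, so the following is only a guess at their likely shape, not the PR's code:

```kotlin
import java.nio.file.Files
import java.nio.file.Path
import java.nio.file.attribute.BasicFileAttributeView
import java.util.zip.ZipEntry

// Assumed shape of setZipAttributes: copy the file's timestamps onto the entry.
fun copyTimesToEntry(entry: ZipEntry, path: Path) {
    val attrs = Files.getFileAttributeView(path, BasicFileAttributeView::class.java).readAttributes()
    entry.creationTime = attrs.creationTime()
    entry.lastModifiedTime = attrs.lastModifiedTime()
    entry.lastAccessTime = attrs.lastAccessTime()
}
```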
@@ -1,10 +1,7 @@
package backup

import aws.sdk.kotlin.services.s3.S3Client
import kotlinx.coroutines.Dispatchers
import kotlinx.coroutines.runBlocking
import java.io.File
import kotlin.io.path.Path

fun main() = runBlocking {
    S3Client.fromEnvironment().use { s3 ->
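For context, the pieces above compose roughly as follows; a usage sketch with a placeholder bucket, paths, and backup key (the real tool exposes a CLI, per the README's `--help` note, and `BackupClient` is assumed to live alongside `Main` in the `backup` package):

```kotlin
import aws.sdk.kotlin.services.s3.S3Client
import kotlinx.coroutines.runBlocking
import java.io.File
import kotlin.io.path.Path

fun main() = runBlocking {
    S3Client.fromEnvironment().use { s3 ->
        val backups = BackupClient(s3, bucketName = "my-backup-bucket")
        // upload() derives the key as "<name>/<timestamp>.zip"
        backups.upload(File("/data/important"))
        backups.restore(Path("/tmp/restore"), "important/2024-01-01T00:00:00Z.zip")
    }
}
```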
@@ -3,6 +3,9 @@ package ziputils
import java.nio.ByteBuffer
import java.nio.ByteOrder

/**
 * Represents a partial ZIP central directory file header.
 */
internal class CentralDirectoryFileHeader(
    val compressedSize: UInt,
    val uncompressedSize: UInt,

@@ -22,8 +25,11 @@ internal class CentralDirectoryFileHeader(
        const val SIZE = 46

        /**
         * Create CentralDirectoryFileHeader from raw byte data.
         * @throws InvalidDataException provided ByteArray is not a supported CEN.
         * Create `CentralDirectoryFileHeader` from raw byte data.
         * @throws InvalidDataException provided `ByteArray` is not a supported CEN.
         * @param data Raw byte data.
         * @param offset Skip first <offset> bytes in data array.
         * @return A `CentralDirectoryFileHeader`.
         */
        @Throws(InvalidDataException::class)
        fun fromByteArray(data: ByteArray, offset: Int): CentralDirectoryFileHeader {
@@ -3,12 +3,22 @@ package ziputils
import java.nio.ByteBuffer
import java.nio.ByteOrder

/**
 * Represents a partial ZIP64 end of central directory locator.
 */
internal class EndOfCentralDirectoryLocator(
    val endOfCentralDirectory64Offset: ULong
) {
    companion object {
        const val SIGNATURE = 0x07064b50U
        const val SIZE = 20
        /**
         * Create `EndOfCentralDirectoryLocator` from raw byte data.
         * @throws InvalidDataException Provided `ByteArray` is not a supported EOCD locator.
         * @param data Raw byte data.
         * @param offset Skip first <offset> bytes in data array.
         * @return A `EndOfCentralDirectoryLocator`.
         */
        @Throws(InvalidDataException::class)
        fun fromByteArray(data: ByteArray, offset: Int): EndOfCentralDirectoryLocator {
            if (data.size - offset < SIZE) {
@@ -4,8 +4,7 @@ import java.nio.ByteBuffer
import java.nio.ByteOrder

/**
 * Partial End of Central Directory record class.
 * Only supports data required by the backup tool.
 * Represents a partial ZIP end of central directory record.
 */
internal class EndOfCentralDirectoryRecord(
    val centralDirectoryOffset: UInt

@@ -17,8 +16,11 @@ internal class EndOfCentralDirectoryRecord(
        const val SIGNATURE = 0x06054b50U
        const val SIZE = 22
        /**
         * Create EndOfCentralDirectoryRecord from raw byte data.
         * @throws InvalidDataException provided ByteArray is not a supported EOCD64.
         * Create `EndOfCentralDirectoryRecord` from raw byte data.
         * @throws InvalidDataException Provided `ByteArray` is not a supported EOCD64.
         * @param data Raw byte data.
         * @param offset Skip first <offset> bytes in data array.
         * @return A `EndOfCentralDirectoryRecord`.
         */
        @Throws(InvalidDataException::class)
        fun fromByteArray(data: ByteArray, offset: Int): EndOfCentralDirectoryRecord {
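These `ziputils` records are what let `restoreFile` locate a single file's bytes without downloading the archive: read the end-of-central-directory record from the tail of the object, follow its offset to the central directory, then range-read just the matching entry. A standalone sketch of the EOCD parsing idea (ZIP integers are little-endian; this is not the repository's exact `fromByteArray`):

```kotlin
import java.nio.ByteBuffer
import java.nio.ByteOrder

// The 22-byte EOCD record starts with signature 0x06054b50; the 4-byte offset
// of the central directory sits at byte 16 of the record.
fun centralDirectoryOffset(data: ByteArray, offset: Int): UInt {
    require(data.size - offset >= 22) { "Not enough bytes for an EOCD record" }
    val buf = ByteBuffer.wrap(data).order(ByteOrder.LITTLE_ENDIAN)
    require(buf.getInt(offset).toUInt() == 0x06054b50U) { "Not an EOCD signature" }
    return buf.getInt(offset + 16).toUInt()
}
```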
@@ -4,8 +4,7 @@ import java.nio.ByteBuffer
import java.nio.ByteOrder

/**
 * Partial End of Central Directory record (ZIP64) class.
 * Only supports data required by the backup tool.
 * Represents a partial ZIP64 end of central directory record.
 */
internal class EndOfCentralDirectoryRecord64(
    val centralDirectoryOffset: ULong

@@ -14,8 +13,11 @@ internal class EndOfCentralDirectoryRecord64(
        const val SIGNATURE = 0x06064b50U
        const val SIZE = 56
        /**
         * Create EndOfCentralDirectoryRecord64 from raw byte data.
         * @throws InvalidDataException provided ByteArray is not a supported EOCD.
         * Create `EndOfCentralDirectoryRecord64` from raw byte data.
         * @throws InvalidDataException Provided `ByteArray` is not a supported EOCD.
         * @param data Raw byte data.
         * @param offset Skip first <offset> bytes in data array.
         * @return A `EndOfCentralDirectoryRecord64`.
         */
        @Throws(InvalidDataException::class)
        fun fromByteArray(data: ByteArray, offset: Int): EndOfCentralDirectoryRecord64 {
@@ -1,4 +1,11 @@
package ziputils

/**
 * Represents an invalid raw byte data exception.
 */
class InvalidDataException(message: String): Exception(message)

/**
 * Represents an invalid raw byte signature exception.
 */
class InvalidSignatureException(message: String): Exception(message)
@@ -1,5 +1,8 @@
package ziputils

/**
 * Represents a partial ZIP extra field record.
 */
internal open class ExtraFieldRecord(
    val id: UShort,
    val size: UShort
@@ -1,5 +1,8 @@
package ziputils

/**
 * Represents a ZIP ZIP64 extra field record (ID 0x0001).
 */
internal class Zip64ExtraFieldRecord(
    size: UShort,
    val uncompressedSize: ULong?,