
Commit

Merge branch 'master' into domzae/source-posthog-individual-project-selection
marcosmarxm authored Jan 10, 2025
2 parents 79a69cd + 7ea0b68 commit a13e410
Showing 517 changed files with 11,514 additions and 12,526 deletions.
37 changes: 24 additions & 13 deletions .github/actions/install-airbyte-ci/action.yml
@@ -30,21 +30,26 @@ runs:
- name: "Determine how Airbyte CI should be installed"
shell: bash
id: determine-install-mode
# When the PR is from a fork, we always install from binary
if: inputs.is_fork == 'false'
run: |
if [[ "${{ github.ref }}" != "refs/heads/master" ]] && [[ "${{ steps.changes.outputs.pipelines_any_changed }}" == "true" ]]; then
echo "Making changes to Airbyte CI on a non-master branch. Airbyte-CI will be installed from source."
echo "install-mode=source" >> $GITHUB_OUTPUT
echo "SENTRY_ENVIRONMENT=dev" >> $GITHUB_ENV
else
echo "install-mode=binary" >> $GITHUB_OUTPUT
echo "SENTRY_ENVIRONMENT=production" >> $GITHUB_ENV
fi
echo "install-mode=source" >> $GITHUB_OUTPUT
echo "SENTRY_ENVIRONMENT=dev" >> $GITHUB_ENV
# When the PR is from a fork, we always install from binary
# if: inputs.is_fork == 'false'
# run: |
# if [[ "${{ github.ref }}" != "refs/heads/master" ]] && [[ "${{ steps.changes.outputs.pipelines_any_changed }}" == "true" ]]; then
# echo "Making changes to Airbyte CI on a non-master branch. Airbyte-CI will be installed from source."
# echo "install-mode=source" >> $GITHUB_OUTPUT
# echo "SENTRY_ENVIRONMENT=dev" >> $GITHUB_ENV
# else
# echo "install-mode=binary" >> $GITHUB_OUTPUT
# echo "SENTRY_ENVIRONMENT=production" >> $GITHUB_ENV
# fi

- name: Install Airbyte CI from binary
id: install-airbyte-ci-binary
if: steps.determine-install-mode.outputs.install-mode == 'binary' || ${{ inputs.is_fork }} == 'true'
if: false
# if: steps.determine-install-mode.outputs.install-mode == 'binary' || ${{ inputs.is_fork }} == 'true'
shell: bash
run: |
curl -sSL ${{ inputs.airbyte_ci_binary_url }} --output airbyte-ci-bin
@@ -54,21 +59,27 @@ runs:
- name: Install Python 3.10
id: install-python-3-10
uses: actions/setup-python@v4
if: steps.determine-install-mode.outputs.install-mode == 'source'
# if: steps.determine-install-mode.outputs.install-mode == 'source'
with:
python-version: "3.10"
token: ${{ inputs.github_token }}

- name: Install Airbyte CI from source
id: install-airbyte-ci-source
if: steps.determine-install-mode.outputs.install-mode == 'source'
if: true
# if: steps.determine-install-mode.outputs.install-mode == 'source'
shell: bash
run: |
pip install --upgrade pip
pip install pipx
pipx ensurepath
pipx install ${{ inputs.path_to_airbyte_ci_source }}
- name: Print installed `airbyte-ci` version
shell: bash
run: |
airbyte-ci --version
- name: Get dagger engine image name
id: get-dagger-engine-image-name
shell: bash
4 changes: 3 additions & 1 deletion .github/workflows/airbyte-ci-release.yml
@@ -43,6 +43,8 @@ jobs:
- name: Install Poetry
id: install_poetry
uses: snok/install-poetry@v1
with:
version: 1.8.5

- name: Install Dependencies
id: install_dependencies
@@ -54,7 +56,7 @@
working-directory: airbyte-ci/connectors/pipelines/
run: poetry run poe build-release-binary ${{ env.BINARY_FILE_NAME }}

- uses: actions/upload-artifact@v3
- uses: actions/upload-artifact@v4
with:
name: airbyte-ci-${{ matrix.os }}-${{ steps.get_short_sha.outputs.sha }}
path: airbyte-ci/connectors/pipelines/dist/${{ env.BINARY_FILE_NAME }}
2 changes: 2 additions & 0 deletions .github/workflows/auto_merge.yml
@@ -18,6 +18,8 @@ jobs:
python-version: "3.10"
- name: Install and configure Poetry
uses: snok/install-poetry@v1
with:
version: 1.8.5
- name: Run auto merge
shell: bash
working-directory: airbyte-ci/connectors/auto_merge
1 change: 1 addition & 0 deletions .github/workflows/connectors_insights.yml
@@ -31,6 +31,7 @@ jobs:
- name: Install Poetry
uses: snok/install-poetry@v1
with:
version: 1.8.5
virtualenvs-create: true
virtualenvs-in-project: true
installer-parallel: true
2 changes: 1 addition & 1 deletion .github/workflows/connectors_tests.yml
@@ -78,7 +78,7 @@ jobs:
# If the condition is not met the job will be skipped (it will not fail)
if: (github.event_name == 'pull_request' && needs.changes.outputs.connectors == 'true' && github.event.pull_request.head.repo.fork != true) || github.event_name == 'workflow_dispatch'
name: Connectors CI
runs-on: connector-test-large
runs-on: linux-20.04-large # Custom runner, defined in GitHub org settings
timeout-minutes: 360 # 6 hours
steps:
- name: Checkout Airbyte
2 changes: 1 addition & 1 deletion .github/workflows/connectors_version_increment_check.yml
@@ -21,7 +21,7 @@ on:
jobs:
connectors_ci:
name: Connectors Version Increment Check
runs-on: connector-test-large
runs-on: linux-20.04-large # Custom runner, defined in GitHub org settings
if: github.event.pull_request.head.repo.fork != true
timeout-minutes: 22
steps:
2 changes: 1 addition & 1 deletion .github/workflows/gradle.yml
@@ -49,7 +49,7 @@ jobs:
# Any revision upwards should be based on a performance analysis of gradle scans.
# See https://github.com/airbytehq/airbyte/pull/36055 for an example of this,
# which explains why we went down from 64 cores to 16.
runs-on: connector-test-large
runs-on: linux-20.04-large # Custom runner, defined in GitHub org settings
name: Gradle Check
timeout-minutes: 30
steps:
4 changes: 3 additions & 1 deletion .github/workflows/live_tests.yml
@@ -44,7 +44,7 @@ on:
jobs:
live_tests:
name: Live Tests
runs-on: connector-test-large
runs-on: linux-20.04-large # Custom runner, defined in GitHub org settings
timeout-minutes: 360 # 6 hours
steps:
- name: Checkout Airbyte
@@ -63,6 +63,8 @@ jobs:
- name: Install Poetry
id: install_poetry
uses: snok/install-poetry@v1
with:
version: 1.8.5

- name: Make poetry venv in project
id: poetry_venv
2 changes: 1 addition & 1 deletion .github/workflows/publish-bulk-cdk.yml
@@ -25,7 +25,7 @@ env:
jobs:
publish-bulk-cdk:
name: Publish Bulk CDK
runs-on: connector-test-large
runs-on: linux-20.04-large # Custom runner, defined in GitHub org settings
timeout-minutes: 30
steps:
- name: Checkout Airbyte
2 changes: 1 addition & 1 deletion .github/workflows/publish-java-cdk-command.yml
@@ -61,7 +61,7 @@ env:
jobs:
publish-cdk:
name: Publish Java CDK
runs-on: connector-test-large
runs-on: linux-20.04-large # Custom runner, defined in GitHub org settings
timeout-minutes: 30
steps:
- name: Link comment to Workflow Run
4 changes: 3 additions & 1 deletion .github/workflows/regression_tests.yml
@@ -44,7 +44,7 @@ on:
jobs:
regression_tests:
name: Regression Tests
runs-on: connector-test-large
runs-on: linux-20.04-large # Custom runner, defined in GitHub org settings
timeout-minutes: 360 # 6 hours
steps:
- name: Checkout Airbyte
@@ -63,6 +63,8 @@ jobs:
- name: Install Poetry
id: install_poetry
uses: snok/install-poetry@v1
with:
version: 1.8.5

- name: Make poetry venv in project
id: poetry_venv
4 changes: 2 additions & 2 deletions .github/workflows/test-performance-command.yml
@@ -131,7 +131,7 @@ jobs:
GCP_GSM_CREDENTIALS: ${{ secrets.GCP_GSM_CREDENTIALS }}
- name: Archive test reports artifacts
if: github.event.inputs.comment-id && failure()
uses: actions/upload-artifact@v3
uses: actions/upload-artifact@v4
with:
name: test-reports
path: |
@@ -145,7 +145,7 @@
- name: Test coverage reports artifacts
if: github.event.inputs.comment-id && success()
uses: actions/upload-artifact@v3
uses: actions/upload-artifact@v4
with:
name: test-reports
path: |
@@ -6,4 +6,4 @@ package io.airbyte.cdk

/** This is used only in tests. */
class ConnectorUncleanExitException(val exitCode: Int) :
Exception("Destination process exited uncleanly: $exitCode")
Exception("Connector process exited uncleanly: $exitCode")
@@ -5,8 +5,8 @@
package io.airbyte.cdk.load.mock_integration_test

import io.airbyte.cdk.load.test.util.NoopDestinationCleaner
import io.airbyte.cdk.load.test.util.NoopExpectedRecordMapper
import io.airbyte.cdk.load.test.util.NoopNameMapper
import io.airbyte.cdk.load.test.util.UncoercedExpectedRecordMapper
import io.airbyte.cdk.load.write.BasicFunctionalityIntegrationTest
import io.airbyte.cdk.load.write.Untyped
import org.junit.jupiter.api.Disabled
@@ -18,7 +18,7 @@ class MockBasicFunctionalityIntegrationTest :
MockDestinationSpecification::class.java,
MockDestinationDataDumper,
NoopDestinationCleaner,
NoopExpectedRecordMapper,
UncoercedExpectedRecordMapper,
NoopNameMapper,
isStreamSchemaRetroactive = false,
supportsDedup = true,
@@ -12,7 +12,6 @@ import java.time.LocalDateTime
import java.time.LocalTime
import java.time.OffsetDateTime
import java.time.OffsetTime
import java.time.ZoneOffset

sealed interface AirbyteValue {
companion object {
@@ -27,11 +26,11 @@ sealed interface AirbyteValue {
is BigInteger -> IntegerValue(value)
is Double -> NumberValue(BigDecimal.valueOf(value))
is BigDecimal -> NumberValue(value)
is LocalDate -> DateValue(value.toString())
is OffsetDateTime,
is LocalDateTime -> TimestampValue(value.toString())
is OffsetTime,
is LocalTime -> TimeValue(value.toString())
is LocalDate -> DateValue(value)
is OffsetDateTime -> TimestampWithTimezoneValue(value)
is LocalDateTime -> TimestampWithoutTimezoneValue(value)
is OffsetTime -> TimeWithTimezoneValue(value)
is LocalTime -> TimeWithoutTimezoneValue(value)
is Map<*, *> ->
ObjectValue.from(@Suppress("UNCHECKED_CAST") (value as Map<String, Any?>))
is List<*> -> ArrayValue.from(value)
@@ -73,76 +72,37 @@ value class NumberValue(val value: BigDecimal) : AirbyteValue, Comparable<NumberValue>
}

@JvmInline
value class DateValue(val value: String) : AirbyteValue, Comparable<DateValue> {
override fun compareTo(other: DateValue): Int {
val thisDate =
try {
LocalDate.parse(value)
} catch (e: Exception) {
LocalDate.MIN
}
val otherDate =
try {
LocalDate.parse(other.value)
} catch (e: Exception) {
LocalDate.MIN
}
return thisDate.compareTo(otherDate)
}
value class DateValue(val value: LocalDate) : AirbyteValue, Comparable<DateValue> {
constructor(date: String) : this(LocalDate.parse(date))
override fun compareTo(other: DateValue): Int = value.compareTo(other.value)
}

@JvmInline
value class TimestampValue(val value: String) : AirbyteValue, Comparable<TimestampValue> {
override fun compareTo(other: TimestampValue): Int {
// Do all comparisons using OffsetDateTime for convenience.
// First, try directly parsing as OffsetDateTime.
// If that fails, try parsing as LocalDateTime and assume UTC.
// We could maybe have separate value classes for these cases,
// but that comes with its own set of problems
// (mostly around sources declaring bad schemas).
val thisTimestamp =
try {
OffsetDateTime.parse(value)
} catch (e: Exception) {
LocalDateTime.parse(value).atOffset(ZoneOffset.UTC)
} catch (e: Exception) {
LocalDateTime.MIN.atOffset(ZoneOffset.UTC)
}
val otherTimestamp =
try {
OffsetDateTime.parse(other.value)
} catch (e: Exception) {
LocalDateTime.parse(other.value).atOffset(ZoneOffset.UTC)
} catch (e: Exception) {
LocalDateTime.MIN.atOffset(ZoneOffset.UTC)
}
return thisTimestamp.compareTo(otherTimestamp)
}
value class TimestampWithTimezoneValue(val value: OffsetDateTime) :
AirbyteValue, Comparable<TimestampWithTimezoneValue> {
constructor(timestamp: String) : this(OffsetDateTime.parse(timestamp))
override fun compareTo(other: TimestampWithTimezoneValue): Int = value.compareTo(other.value)
}

@JvmInline
value class TimeValue(val value: String) : AirbyteValue, Comparable<TimeValue> {
override fun compareTo(other: TimeValue): Int {
// Similar to TimestampValue, try parsing with/without timezone,
// and do all comparisons using OffsetTime.
val thisTime =
try {
OffsetTime.parse(value)
} catch (e: Exception) {
LocalTime.parse(value).atOffset(ZoneOffset.UTC)
} catch (e: Exception) {
LocalTime.MIN.atOffset(ZoneOffset.UTC)
}
val otherTime =
try {
OffsetTime.parse(other.value)
} catch (e: Exception) {
LocalTime.parse(other.value).atOffset(ZoneOffset.UTC)
} catch (e: Exception) {
LocalTime.MIN.atOffset(ZoneOffset.UTC)
}
return thisTime.compareTo(otherTime)
}
value class TimestampWithoutTimezoneValue(val value: LocalDateTime) :
AirbyteValue, Comparable<TimestampWithoutTimezoneValue> {
constructor(timestamp: String) : this(LocalDateTime.parse(timestamp))
override fun compareTo(other: TimestampWithoutTimezoneValue): Int = value.compareTo(other.value)
}

@JvmInline
value class TimeWithTimezoneValue(val value: OffsetTime) :
AirbyteValue, Comparable<TimeWithTimezoneValue> {
constructor(time: String) : this(OffsetTime.parse(time))
override fun compareTo(other: TimeWithTimezoneValue): Int = value.compareTo(other.value)
}

@JvmInline
value class TimeWithoutTimezoneValue(val value: LocalTime) :
AirbyteValue, Comparable<TimeWithoutTimezoneValue> {
constructor(time: String) : this(LocalTime.parse(time))
override fun compareTo(other: TimeWithoutTimezoneValue): Int = value.compareTo(other.value)
}

@JvmInline
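To make the intent of this refactor concrete, here is a minimal, hypothetical sketch of the new strongly typed date/time value classes in use. The import of the value classes is an assumption (the hunk above does not show their package declaration), and nothing below is part of the commit itself.

import java.time.LocalDate
import java.time.OffsetDateTime
// Assumed location of the value classes; not shown in the hunk above.
import io.airbyte.cdk.load.data.DateValue
import io.airbyte.cdk.load.data.TimestampWithTimezoneValue

fun main() {
    // The String secondary constructors parse ISO-8601 and fail fast on bad input,
    // instead of silently falling back to LocalDate.MIN as the old compareTo did.
    val a = DateValue("2024-01-01")
    val b = DateValue(LocalDate.of(2024, 6, 1))
    println(a < b) // true: comparison now delegates to LocalDate.compareTo

    // Timestamps with and without a timezone are now distinct types, rather than one
    // string-backed TimestampValue that assumed UTC whenever the offset was missing.
    val ts = TimestampWithTimezoneValue(OffsetDateTime.parse("2024-01-01T12:00:00+02:00"))
    println(ts.value.toInstant()) // 2024-01-01T10:00:00Z

    // A timestamp without an offset no longer parses here; it throws DateTimeParseException
    // and belongs in TimestampWithoutTimezoneValue instead.
    // TimestampWithTimezoneValue("2024-01-01T12:00:00")
}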
