
Commit

Merge branch 'master' into create-experiment-from-funnel
joshsny committed Jan 28, 2025
2 parents 5504697 + 8c5a64d commit 56ea5f4
Showing 156 changed files with 12,130 additions and 2,510 deletions.
1,782 changes: 887 additions & 895 deletions .flox/env/manifest.lock

Large diffs are not rendered by default.

5 changes: 2 additions & 3 deletions .flox/env/manifest.toml
@@ -35,10 +35,9 @@ libiconv.systems = ["aarch64-darwin"]
libiconv.pkg-group = "rust-toolchain"
# Go
go = { pkg-path = "go", version = "1.22", pkg-group = "go" }
# General CLI tools
# Top level
mprocs.pkg-path = "mprocs"
xmlsec.pkg-path = "xmlsec"
xmlsec.version = "1.3.6"
xmlsec = { pkg-path = "xmlsec", version = "1.3.6" }

# Set environment variables in the `[vars]` section. These variables may not
# reference one another, and are added to the environment without first
49 changes: 0 additions & 49 deletions .github/workflows/alert-on-failed-automerge.yml

This file was deleted.

39 changes: 37 additions & 2 deletions .github/workflows/automerge.yml
@@ -3,6 +3,9 @@ name: Automerge
env:
MERGE_METHOD: 'squash'
MERGE_RETRY_SLEEP: 300000
# MERGE_ERROR_FAIL: Set this to "true" to have the action exit with error code 1
# when the pull request could not be merged successfully during a run.
MERGE_ERROR_FAIL: true

on:
pull_request:
Expand All @@ -27,9 +30,41 @@ jobs:
env:
IS_POSTHOG_BOT_AVAILABLE: ${{ secrets.POSTHOG_BOT_GITHUB_TOKEN != '' }}
steps:
- name: Automerge
# must have an id so subsequent steps can use the output
- id: automergeStep
name: Automerge
if: env.IS_POSTHOG_BOT_AVAILABLE == 'true'
uses: pascalgn/[email protected]
env:
GITHUB_TOKEN: ${{ secrets.POSTHOG_BOT_GITHUB_TOKEN }}
- run: echo

# the automerge documentation claims it adds these outputs but we don't get them
# maybe because it was a fake PR 🤷
- name: Print Automerge Outputs as JSON
if: ${{ always() }}
run: |
echo "For future debugging... the Automerge Outputs are:"
if [[ "${{ steps.automergeStep.outputs.mergeResult }}" != "" ]]; then
echo '{"mergeResult": "${{ steps.automergeStep.outputs.mergeResult }}", "pullRequestNumber": "${{ steps.automergeStep.outputs.pullRequestNumber }}"}' | jq .
else
echo "No outputs from Automerge action."
fi
# a CDP destination is setup to alert when we get this event
- name: Send Automerge Event to PostHog
if: ${{ failure() }}
uses: PostHog/[email protected]
with:
posthog-token: '${{ secrets.POSTHOG_API_TOKEN }}'
event: 'posthog-github-automerge-pr-status'
properties: >-
{
"prUrl": "${{ github.event.pull_request.html_url }}",
"jobStatus": "${{ job.status }}",
"prTitle": "${{ github.event.pull_request.title }}",
"prNumber": "${{ github.event.pull_request.number }}",
"prState": "${{ github.event.pull_request.mergeable_state }}",
"mergeResult": "${{ steps.automergeStep.outputs.mergeResult }}",
"pullRequestNumber": "${{ steps.automergeStep.outputs.pullRequestNumber }}",
"automergeOutcome": "${{ steps.automergeStep.outcome }}"
}
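
For context on the failure path above: MERGE_ERROR_FAIL makes the automerge action exit non-zero when it cannot merge, which is what lets the if: failure() step fire and report the posthog-github-automerge-pr-status event through PostHog/posthog-github-action; a CDP destination then alerts on that event. A rough Python sketch, not part of this commit, of an equivalent capture call using the posthog client library, with placeholder key, host, distinct ID, and property values:

from posthog import Posthog

# Placeholder credentials; the workflow uses the POSTHOG_API_TOKEN secret instead.
posthog = Posthog("phc_placeholder", host="https://us.i.posthog.com")

posthog.capture(
    distinct_id="github-automerge",  # made-up distinct ID for illustration
    event="posthog-github-automerge-pr-status",
    properties={
        "prUrl": "https://github.com/PostHog/posthog/pull/0",  # placeholder values below
        "jobStatus": "failure",
        "prTitle": "Example PR title",
        "prNumber": "0",
        "prState": "dirty",
        "mergeResult": "",
        "pullRequestNumber": "",
        "automergeOutcome": "failure",
    },
)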
15 changes: 7 additions & 8 deletions .github/workflows/livestream-docker-image.yml
@@ -10,11 +10,12 @@ on:

jobs:
build:
runs-on: ubuntu-24.04
runs-on: depot-ubuntu-24.04

permissions:
contents: read
packages: write
id-token: write

outputs:
sha: ${{ steps.push.outputs.digest }}
@@ -40,23 +41,21 @@ jobs:
with:
images: ghcr.io/posthog/posthog/livestream

- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2

- name: Set up QEMU
uses: docker/setup-qemu-action@v2
- name: Set up Depot CLI
uses: depot/setup-action@v1

- name: Build and push Docker image
id: push
if: github.ref == 'refs/heads/master'
uses: docker/build-push-action@v5
uses: depot/build-push-action@v1
with:
context: livestream/
context: ./livestream/
file: livestream/Dockerfile
push: true
platforms: linux/amd64,linux/arm64
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
project: '87b1ch2t7h'

deploy:
runs-on: ubuntu-24.04
96 changes: 16 additions & 80 deletions dags/person_overrides.py
@@ -8,7 +8,12 @@
from clickhouse_driver import Client

from posthog import settings
from posthog.clickhouse.cluster import ClickhouseCluster, get_cluster
from posthog.clickhouse.cluster import (
ClickhouseCluster,
Mutation,
MutationRunner,
get_cluster,
)
from posthog.models.event.sql import EVENTS_DATA_TABLE
from posthog.models.person.sql import PERSON_DISTINCT_ID_OVERRIDES_TABLE

@@ -91,28 +96,6 @@ def sync(self, client: Client) -> None:
assert queue_size == 0


@dataclass
class Mutation:
table: str
mutation_id: str

def is_done(self, client: Client) -> bool:
[[is_done]] = client.execute(
f"""
SELECT is_done
FROM system.mutations
WHERE database = %(database)s AND table = %(table)s AND mutation_id = %(mutation_id)s
ORDER BY create_time DESC
""",
{"database": settings.CLICKHOUSE_DATABASE, "table": self.table, "mutation_id": self.mutation_id},
)
return is_done

def wait(self, client: Client) -> None:
while not self.is_done(client):
time.sleep(15.0)


@dataclass
class PersonOverridesSnapshotDictionary:
source: PersonOverridesSnapshotTable
@@ -194,76 +177,29 @@ def load(self, client: Client):
[[checksum]] = results
return checksum

def __find_existing_mutation(self, client: Client, table: str, command_kind: str) -> Mutation | None:
results = client.execute(
f"""
SELECT mutation_id
FROM system.mutations
WHERE
database = %(database)s
AND table = %(table)s
AND startsWith(command, %(command_kind)s)
AND command like concat('%%', %(name)s, '%%')
AND NOT is_killed -- ok to restart a killed mutation
ORDER BY create_time DESC
""",
{
"database": settings.CLICKHOUSE_DATABASE,
"table": table,
"command_kind": command_kind,
"name": self.qualified_name,
},
)
if not results:
return None
else:
assert len(results) == 1
[[mutation_id]] = results
return Mutation(table, mutation_id)

def enqueue_person_id_update_mutation(self, client: Client) -> Mutation:
table = EVENTS_DATA_TABLE()

# if this mutation already exists, don't start it again
# NOTE: this is theoretically subject to replication lag and accuracy of this result is not a guarantee
if mutation := self.__find_existing_mutation(client, table, "UPDATE"):
return mutation

client.execute(
@property
def person_id_update_mutation_runner(self) -> MutationRunner:
return MutationRunner(
EVENTS_DATA_TABLE(),
f"""
ALTER TABLE {settings.CLICKHOUSE_DATABASE}.{table}
UPDATE person_id = dictGet(%(name)s, 'person_id', (team_id, distinct_id))
WHERE dictHas(%(name)s, (team_id, distinct_id))
""",
{"name": self.qualified_name},
)

mutation = self.__find_existing_mutation(client, table, "UPDATE")
assert mutation is not None
return mutation

def enqueue_overrides_delete_mutation(self, client: Client) -> Mutation:
table = PERSON_DISTINCT_ID_OVERRIDES_TABLE

# if this mutation already exists, don't start it again
# NOTE: this is theoretically subject to replication lag and accuracy of this result is not a guarantee
if mutation := self.__find_existing_mutation(client, table, "DELETE"):
return mutation

client.execute(
@property
def overrides_delete_mutation_runner(self) -> MutationRunner:
return MutationRunner(
PERSON_DISTINCT_ID_OVERRIDES_TABLE,
f"""
ALTER TABLE {settings.CLICKHOUSE_DATABASE}.{table}
DELETE WHERE
isNotNull(dictGetOrNull(%(name)s, 'version', (team_id, distinct_id)) as snapshot_version)
AND snapshot_version >= version
""",
{"name": self.qualified_name},
)

mutation = self.__find_existing_mutation(client, table, "DELETE")
assert mutation is not None
return mutation


# Snapshot Table Management

@@ -375,7 +311,7 @@ def start_person_id_update_mutations(
shard_mutations = {
host.shard_num: mutation
for host, mutation in (
cluster.map_one_host_per_shard(dictionary.enqueue_person_id_update_mutation).result().items()
cluster.map_one_host_per_shard(dictionary.person_id_update_mutation_runner.enqueue).result().items()
)
}
return (dictionary, shard_mutations)
@@ -401,7 +337,7 @@ def start_overrides_delete_mutations(
dictionary: PersonOverridesSnapshotDictionary,
) -> tuple[PersonOverridesSnapshotDictionary, Mutation]:
"""Start the mutation to remove overrides contained within the snapshot from the overrides table."""
mutation = cluster.any_host(dictionary.enqueue_overrides_delete_mutation).result()
mutation = cluster.any_host(dictionary.overrides_delete_mutation_runner.enqueue).result()
return (dictionary, mutation)
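
The Mutation dataclass and the private __find_existing_mutation helper deleted above now live in posthog.clickhouse.cluster, which this diff only imports. That module is not shown here, so the following is a minimal sketch of the interface the new call sites appear to rely on, reconstructed from the deleted code; the field names, the de-duplication details, and whether the runner prepends the ALTER TABLE clause are assumptions, not the actual implementation.

import time
from dataclasses import dataclass, field

from clickhouse_driver import Client

from posthog import settings


@dataclass
class Mutation:
    # Same fields as the dataclass removed above; is_done()/wait() poll system.mutations.
    table: str
    mutation_id: str

    def is_done(self, client: Client) -> bool:
        [[is_done]] = client.execute(
            "SELECT is_done FROM system.mutations "
            "WHERE database = %(database)s AND table = %(table)s AND mutation_id = %(mutation_id)s",
            {"database": settings.CLICKHOUSE_DATABASE, "table": self.table, "mutation_id": self.mutation_id},
        )
        return bool(is_done)

    def wait(self, client: Client) -> None:
        while not self.is_done(client):
            time.sleep(15.0)


@dataclass
class MutationRunner:
    table: str        # e.g. EVENTS_DATA_TABLE()
    command: str      # assumed to be the UPDATE ... / DELETE WHERE ... body of the ALTER
    parameters: dict = field(default_factory=dict)

    def enqueue(self, client: Client) -> Mutation:
        # The real runner presumably re-uses an already-queued equivalent mutation first,
        # as the deleted __find_existing_mutation helper did; this sketch simply submits
        # the command and returns the most recently created mutation for the table.
        client.execute(
            f"ALTER TABLE {settings.CLICKHOUSE_DATABASE}.{self.table} {self.command}",
            self.parameters,
        )
        [[mutation_id]] = client.execute(
            "SELECT mutation_id FROM system.mutations "
            "WHERE database = %(database)s AND table = %(table)s AND NOT is_killed "
            "ORDER BY create_time DESC LIMIT 1",
            {"database": settings.CLICKHOUSE_DATABASE, "table": self.table},
        )
        return Mutation(self.table, mutation_id)

Exposing the runners as properties means the updated call sites can hand dictionary.person_id_update_mutation_runner.enqueue (or overrides_delete_mutation_runner.enqueue) directly to cluster.map_one_host_per_shard(...) and cluster.any_host(...), as the two start_* functions shown above now do.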


@@ -56,7 +56,6 @@
e."$group_0" as aggregation_target,
if(notEmpty(pdi.distinct_id), pdi.person_id, e.person_id) as person_id,
person.person_props as person_props,
person.pmat_email as pmat_email,
if(event = 'step one', 1, 0) as step_0,
if(step_0 = 1, timestamp, null) as latest_0,
if(event = 'step two', 1, 0) as step_1,
@@ -80,7 +79,6 @@
HAVING argMax(is_deleted, version) = 0) AS pdi ON e.distinct_id = pdi.distinct_id
INNER JOIN
(SELECT id,
argMax(pmat_email, version) as pmat_email,
argMax(properties, version) as person_props
FROM person
WHERE team_id = 99999
@@ -97,7 +95,7 @@
AND event IN ['step one', 'step three', 'step two']
AND toTimeZone(timestamp, 'UTC') >= toDateTime('2021-05-01 00:00:00', 'UTC')
AND toTimeZone(timestamp, 'UTC') <= toDateTime('2021-05-10 23:59:59', 'UTC')
AND (("pmat_email" ILIKE '%g0%'
AND ((replaceRegexpAll(JSONExtractRaw(person_props, 'email'), '^"|"$', '') ILIKE '%g0%'
OR replaceRegexpAll(JSONExtractRaw(person_props, 'name'), '^"|"$', '') ILIKE '%g0%'
OR replaceRegexpAll(JSONExtractRaw(e.properties, 'distinct_id'), '^"|"$', '') ILIKE '%g0%'
OR replaceRegexpAll(JSONExtractRaw(group_properties_0, 'name'), '^"|"$', '') ILIKE '%g0%'
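One note on the snapshot change above: the funnel query now reads the email out of the person properties JSON instead of the dropped pmat_email materialized column. In ClickHouse, JSONExtractRaw returns the raw JSON token, so a string value keeps its surrounding double quotes, which is why the expression wraps it in replaceRegexpAll(..., '^"|"$', ''). A rough Python analogue of that extraction, using a made-up person_props value for illustration:

import json
import re

person_props = '{"email": "[email protected]", "name": "g0 user"}'  # hypothetical value

# JSONExtractRaw(person_props, 'email') keeps the JSON quoting: '"[email protected]"'
raw = json.dumps(json.loads(person_props)["email"])

# replaceRegexpAll(raw, '^"|"$', '') strips the leading and trailing quote
email = re.sub(r'^"|"$', '', raw)

assert email == "[email protected]"
assert "g0" in email.lower()  # so the ILIKE '%g0%' filter in the query would match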
8 changed files could not be displayed in the diff view.
27 changes: 26 additions & 1 deletion frontend/src/layout/FeaturePreviews/FeaturePreviews.tsx
@@ -19,12 +19,37 @@ export function FeaturePreviews(): JSX.Element {
earlyAccessFeatures.length === 0 && 'items-center justify-center'
)}
>
{earlyAccessFeatures.length > 4 && (
<div className="mb-4">
<h4 className="font-semibold mb-0">Jump to:</h4>
<ul className="list-disc pl-4">
{earlyAccessFeatures.map(
(feature) =>
feature.flagKey && (
<li key={`nav-${feature.flagKey}`}>
<Link
onClick={(e) => {
e.preventDefault()
document
.getElementById(`feature-preview-${feature.flagKey}`)
?.scrollIntoView({ behavior: 'smooth' })
}}
>
{feature.name}
</Link>
</li>
)
)}
</ul>
<LemonDivider className="my-4" />
</div>
)}
{earlyAccessFeatures.map((feature, i) => {
if (!feature.flagKey) {
return false
}
return (
<div key={feature.flagKey}>
<div key={feature.flagKey} id={`feature-preview-${feature.flagKey}`}>
{i > 0 && <LemonDivider className="my-4" />}
<FeaturePreview key={feature.flagKey} feature={feature} />
</div>

0 comments on commit 56ea5f4
