From 8acec7eb0423a2341613c27f932d22d295103715 Mon Sep 17 00:00:00 2001 From: Egon Firman Date: Sat, 22 Oct 2022 22:53:22 +0700 Subject: [PATCH 1/6] adding nrpgx5 tracer for github.com/jackc/pgx/v5 --- v3/integrations/nrpgx5/README.md | 10 + v3/integrations/nrpgx5/go.mod | 12 + v3/integrations/nrpgx5/nrpgx5.go | 178 ++++++++++ v3/integrations/nrpgx5/nrpgx5_test.go | 318 ++++++++++++++++++ .../nrpgx5/pgsnap_tracer_batch.txt | 40 +++ .../nrpgx5/pgsnap_tracer_connect.txt | 16 + .../nrpgx5/pgsnap_tracer_inpool.txt | 94 ++++++ .../nrpgx5/pgsnap_tracer_trace_crud.txt | 96 ++++++ 8 files changed, 764 insertions(+) create mode 100644 v3/integrations/nrpgx5/README.md create mode 100644 v3/integrations/nrpgx5/go.mod create mode 100644 v3/integrations/nrpgx5/nrpgx5.go create mode 100644 v3/integrations/nrpgx5/nrpgx5_test.go create mode 100644 v3/integrations/nrpgx5/pgsnap_tracer_batch.txt create mode 100644 v3/integrations/nrpgx5/pgsnap_tracer_connect.txt create mode 100644 v3/integrations/nrpgx5/pgsnap_tracer_inpool.txt create mode 100644 v3/integrations/nrpgx5/pgsnap_tracer_trace_crud.txt diff --git a/v3/integrations/nrpgx5/README.md b/v3/integrations/nrpgx5/README.md new file mode 100644 index 000000000..3e5070f66 --- /dev/null +++ b/v3/integrations/nrpgx5/README.md @@ -0,0 +1,10 @@ +# v3/integrations/nrpgx5 [![GoDoc](https://godoc.org/github.com/newrelic/go-agent/v3/integrations/nrpgx5?status.svg)](https://godoc.org/github.com/newrelic/go-agent/v3/integrations/nrpgx5) + +Package `nrpgx` instruments https://github.com/jackc/pgx/v5. + +```go +import "github.com/newrelic/go-agent/v3/integrations/nrpgx5" +``` + +For more information, see +[godocs](https://godoc.org/github.com/newrelic/go-agent/v3/integrations/nrpgx5). diff --git a/v3/integrations/nrpgx5/go.mod b/v3/integrations/nrpgx5/go.mod new file mode 100644 index 000000000..920f56b2f --- /dev/null +++ b/v3/integrations/nrpgx5/go.mod @@ -0,0 +1,12 @@ +module github.com/newrelic/go-agent/v3/integrations/nrpgx5 + +go 1.11 + +require ( + github.com/egon12/pgsnap v0.0.0-20221022154027-2847f0124ed8 + github.com/jackc/pgx/v4 v4.17.2 // indirect + github.com/jackc/pgx/v5 v5.0.3 + github.com/newrelic/go-agent/v3 v3.3.0 + github.com/stretchr/testify v1.8.0 + golang.org/x/crypto v0.1.0 // indirect +) diff --git a/v3/integrations/nrpgx5/nrpgx5.go b/v3/integrations/nrpgx5/nrpgx5.go new file mode 100644 index 000000000..6e1c833da --- /dev/null +++ b/v3/integrations/nrpgx5/nrpgx5.go @@ -0,0 +1,178 @@ +// Package nrpgx5 instruments https://github.com/jackc/pgx/v5. +// +// Use this package to instrument your PostgreSQL calls using the pgx +// library. 
+//
+// These are the steps to instrument your pgx calls without using `database/sql`
+// (if you want to use `database/sql`, use the `nrpgx` package instead).
+//
+// To instrument your pgx calls,
+// you can set the tracer in the pgx.Config like this
+// ```go
+// import (
+// 	"context"
+// 	"github.com/jackc/pgx/v5"
+// 	"github.com/newrelic/go-agent/v3/integrations/nrpgx5"
+// )
+//
+// func main() {
+// 	config, err := pgx.ParseConfig("postgres://user:password@localhost:5432/database")
+// 	if err != nil {
+// 		panic(err)
+// 	}
+//
+// 	cfg.Tracer = nrpgx5.NewTracer()
+// 	conn, err := pgx.ConnectConfig(context.Background(), cfg)
+// 	if err != nil {
+// 		panic(err)
+// 	}
+// }
+// ```
+// or you can set the tracer in the pgxpool.Config like this
+// ```go
+// import (
+// 	"context"
+// 	"github.com/jackc/pgx/v5"
+// 	"github.com/jackc/pgx/v5/pgxpool"
+// 	"github.com/newrelic/go-agent/v3/integrations/nrpgx5"
+// )
+//
+// func main() {
+// 	config, err := pgxpool.ParseConfig("postgres://user:password@localhost:5432/database")
+// 	if err != nil {
+// 		panic(err)
+// 	}
+//
+// 	cfg.ConnConfig.Tracer = nrpgx5.NewTracer()
+// 	conn, err := pgxpool.ConnectConfig(context.Background(), cfg)
+// 	if err != nil {
+// 		panic(err)
+// 	}
+// }
+// ```
+
+package nrpgx5
+
+import (
+	"context"
+	"strconv"
+
+	"github.com/jackc/pgx/v5"
+	"github.com/newrelic/go-agent/v3/internal"
+	"github.com/newrelic/go-agent/v3/newrelic"
+	"github.com/newrelic/go-agent/v3/newrelic/sqlparse"
+)
+
+func init() {
+	internal.TrackUsage("integration", "driver", "nrpgx5")
+}
+
+type (
+	Tracer struct {
+		BaseSegment newrelic.DatastoreSegment
+		ParseQuery  func(segment *newrelic.DatastoreSegment, query string)
+	}
+
+	nrPgxSegmentType string
+)
+
+const (
+	querySegmentKey   nrPgxSegmentType = "nrPgx5Segment"
+	prepareSegmentKey nrPgxSegmentType = "prepareNrPgx5Segment"
+	batchSegmentKey   nrPgxSegmentType = "batchNrPgx5Segment"
+)
+
+func NewTracer() *Tracer {
+	return &Tracer{
+		ParseQuery: sqlparse.ParseQuery,
+	}
+}
+
+// TraceConnectStart is called at the beginning of Connect and ConnectConfig calls. The returned context is used for
+// the rest of the call and will be passed to TraceConnectEnd. It implements pgx.ConnectTracer.
+func (t *Tracer) TraceConnectStart(ctx context.Context, data pgx.TraceConnectStartData) context.Context {
+	t.BaseSegment = newrelic.DatastoreSegment{
+		Product:      newrelic.DatastorePostgres,
+		Host:         data.ConnConfig.Host,
+		PortPathOrID: strconv.FormatUint(uint64(data.ConnConfig.Port), 10),
+		DatabaseName: data.ConnConfig.Database,
+	}
+
+	return ctx
+}
+
+// TraceConnectEnd implements pgx.ConnectTracer. Nothing is recorded here.
+func (Tracer) TraceConnectEnd(ctx context.Context, data pgx.TraceConnectEndData) {}
+
+// TraceQueryStart is called at the beginning of Query, QueryRow, and Exec calls. The returned context is used for the
+// rest of the call and will be passed to TraceQueryEnd. It implements pgx.QueryTracer.
+func (t *Tracer) TraceQueryStart(ctx context.Context, conn *pgx.Conn, data pgx.TraceQueryStartData) context.Context {
+	segment := t.BaseSegment
+	segment.StartTime = newrelic.FromContext(ctx).StartSegmentNow()
+	segment.ParameterizedQuery = data.SQL
+	segment.QueryParameters = t.getQueryParameters(data.Args)
+
+	// fill Operation and Collection
+	t.ParseQuery(&segment, data.SQL)
+
+	return context.WithValue(ctx, querySegmentKey, &segment)
+}
+
+// TraceQueryEnd implements pgx.QueryTracer. It retrieves the datastore segment from the context and ends it.
+func (t *Tracer) TraceQueryEnd(ctx context.Context, conn *pgx.Conn, data pgx.TraceQueryEndData) {
+	segment, ok := ctx.Value(querySegmentKey).(*newrelic.DatastoreSegment)
+	if !ok {
+		return
+	}
+	segment.End()
+}
+
+func (t *Tracer) getQueryParameters(args []interface{}) map[string]interface{} {
+	result := map[string]interface{}{}
+	for i, arg := range args {
+		result["$"+strconv.Itoa(i)] = arg
+	}
+	return result
+}
+
+// TraceBatchStart is called at the beginning of SendBatch calls. The returned context is used for the
+// rest of the call and will be passed to TraceBatchQuery and TraceBatchEnd. It implements pgx.BatchTracer.
+func (t *Tracer) TraceBatchStart(ctx context.Context, conn *pgx.Conn, data pgx.TraceBatchStartData) context.Context {
+	segment := t.BaseSegment
+	segment.StartTime = newrelic.FromContext(ctx).StartSegmentNow()
+	segment.Operation = "batch"
+	segment.Collection = ""
+
+	return context.WithValue(ctx, batchSegmentKey, &segment)
+}
+
+// TraceBatchQuery implements pgx.BatchTracer. It appends each queued query's SQL to the batch segment.
+func (t *Tracer) TraceBatchQuery(ctx context.Context, conn *pgx.Conn, data pgx.TraceBatchQueryData) {
+	segment, ok := ctx.Value(batchSegmentKey).(*newrelic.DatastoreSegment)
+	if !ok {
+		return
+	}
+
+	segment.ParameterizedQuery += data.SQL + "\n"
+}
+
+// TraceBatchEnd implements pgx.BatchTracer. It retrieves the batch segment from the context and ends it.
+func (t *Tracer) TraceBatchEnd(ctx context.Context, conn *pgx.Conn, data pgx.TraceBatchEndData) {
+	segment, ok := ctx.Value(batchSegmentKey).(*newrelic.DatastoreSegment)
+	if !ok {
+		return
+	}
+	segment.End()
+}
+
+// TracePrepareStart is called at the beginning of Prepare calls. The returned context is used for the
+// rest of the call and will be passed to TracePrepareEnd. It implements pgx.PrepareTracer.
+// Query and QueryRow call Prepare internally, so starting a segment here would record the datastore segment twice.
+// For that reason this function does nothing and just returns the context.
+func (t *Tracer) TracePrepareStart(ctx context.Context, conn *pgx.Conn, data pgx.TracePrepareStartData) context.Context {
+	return ctx
+}
+
+// TracePrepareEnd implements pgx.PrepareTracer. Nothing is recorded here.
+func (t *Tracer) TracePrepareEnd(ctx context.Context, conn *pgx.Conn, data pgx.TracePrepareEndData) {
+}
diff --git a/v3/integrations/nrpgx5/nrpgx5_test.go b/v3/integrations/nrpgx5/nrpgx5_test.go
new file mode 100644
index 000000000..90e6be1b3
--- /dev/null
+++ b/v3/integrations/nrpgx5/nrpgx5_test.go
@@ -0,0 +1,318 @@
+package nrpgx5
+
+import (
+	"context"
+	"net/url"
+	"os"
+	"strconv"
+	"testing"
+
+	"github.com/egon12/pgsnap"
+	"github.com/jackc/pgx/v5"
+	"github.com/jackc/pgx/v5/pgxpool"
+	"github.com/newrelic/go-agent/v3/internal"
+	"github.com/newrelic/go-agent/v3/internal/integrationsupport"
+	"github.com/newrelic/go-agent/v3/newrelic"
+	"github.com/stretchr/testify/assert"
+)
+
+// To create the pgsnap_*.txt snapshot files, we use a real database:
+// delete all pgnap_*.txt file and fill PGSNAP_DB_URL to recreate the snapshot file +// for example run it with +// ```sh +// PGSNAP_DB_URL="postgres://postgres:postgres@localhost:5432/postgres?sslmode=disable" go test -v -run TestTracer_Trace_CRUD +// ``` + +func TestTracer_Trace_CRUD(t *testing.T) { + con, finish := getTestCon(t) + defer finish() + + tests := []struct { + name string + fn func(context.Context, *pgx.Conn) + metric []internal.WantMetric + }{ + { + name: "query should send the metric after the row close", + fn: func(ctx context.Context, con *pgx.Conn) { + rows, _ := con.Query(ctx, "SELECT id, name, timestamp FROM mytable LIMIT $1", 2) + rows.Close() + }, + metric: []internal.WantMetric{ + {Name: "Datastore/operation/Postgres/select"}, + {Name: "Datastore/statement/Postgres/mytable/select"}, + }, + }, + { + name: "queryrow should send the metric after scan", + fn: func(ctx context.Context, con *pgx.Conn) { + row := con.QueryRow(ctx, "SELECT id, name, timestamp FROM mytable") + _ = row.Scan() + }, + metric: []internal.WantMetric{ + {Name: "Datastore/operation/Postgres/select"}, + {Name: "Datastore/statement/Postgres/mytable/select"}, + }, + }, + { + name: "insert should send the metric", + fn: func(ctx context.Context, con *pgx.Conn) { + _, _ = con.Exec(ctx, "INSERT INTO mytable(name) VALUES ($1)", "myname is") + }, + metric: []internal.WantMetric{ + {Name: "Datastore/operation/Postgres/insert"}, + {Name: "Datastore/statement/Postgres/mytable/insert"}, + }, + }, + { + name: "update should send the metric", + fn: func(ctx context.Context, con *pgx.Conn) { + _, _ = con.Exec(ctx, "UPDATE mytable set name = $2 WHERE id = $1", 1, "myname is") + }, + metric: []internal.WantMetric{ + {Name: "Datastore/operation/Postgres/update"}, + {Name: "Datastore/statement/Postgres/mytable/update"}, + }, + }, + { + name: "delete should send the metric", + fn: func(ctx context.Context, con *pgx.Conn) { + _, _ = con.Exec(ctx, "DELETE FROM mytable WHERE id = $1", 4) + }, + metric: []internal.WantMetric{ + {Name: "Datastore/operation/Postgres/delete"}, + {Name: "Datastore/statement/Postgres/mytable/delete"}, + }, + }, + { + name: "select 1 should send the metric", + fn: func(ctx context.Context, con *pgx.Conn) { + _, _ = con.Exec(ctx, "SELECT 1") + }, + metric: []internal.WantMetric{ + {Name: "Datastore/operation/Postgres/select"}, + }, + }, + { + name: "query error should also send the metric", + fn: func(ctx context.Context, con *pgx.Conn) { + _, _ = con.Query(ctx, "SELECT * FROM non_existent_table") + }, + metric: []internal.WantMetric{ + {Name: "Datastore/operation/Postgres/select"}, + {Name: "Datastore/statement/Postgres/non_existent_table/select"}, + }, + }, + { + name: "exec error should also send the metric", + fn: func(ctx context.Context, con *pgx.Conn) { + _, _ = con.Exec(ctx, "INSERT INTO non_existent_table(name) VALUES ($1)", "wrong name") + }, + metric: []internal.WantMetric{ + {Name: "Datastore/operation/Postgres/insert"}, + {Name: "Datastore/statement/Postgres/non_existent_table/insert"}, + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + app := integrationsupport.NewBasicTestApp() + txn := app.StartTransaction(t.Name()) + ctx := newrelic.NewContext(context.Background(), txn) + + tt.fn(ctx, con) + + txn.End() + app.ExpectMetricsPresent(t, tt.metric) + }) + } +} + +func TestTracer_connect(t *testing.T) { + conn, finish := getTestCon(t) + defer finish() + + cfg := conn.Config() + tracer := cfg.Tracer.(*Tracer) + + // hostname will + t.Run("connect 
should set tracer host port and database", func(t *testing.T) { + assert.Equal(t, cfg.Host, tracer.BaseSegment.Host) + assert.Equal(t, cfg.Database, tracer.BaseSegment.DatabaseName) + assert.Equal(t, strconv.FormatUint(uint64(cfg.Port), 10), tracer.BaseSegment.PortPathOrID) + }) + + t.Run("exec should send metric with instance host and port ", func(t *testing.T) { + app := integrationsupport.NewBasicTestApp() + + txn := app.StartTransaction(t.Name()) + + ctx := newrelic.NewContext(context.Background(), txn) + _, _ = conn.Exec(ctx, "INSERT INTO mytable(name) VALUES ($1)", "myname is") + + txn.End() + + app.ExpectMetricsPresent(t, []internal.WantMetric{ + {Name: "Datastore/instance/Postgres/" + getDBHostname() + "/" + tracer.BaseSegment.PortPathOrID}, + }) + }) +} + +func TestTracer_batch(t *testing.T) { + conn, finish := getTestCon(t) + defer finish() + + cfg := conn.Config() + tracer := cfg.Tracer.(*Tracer) + + t.Run("exec should send metric with instance host and port ", func(t *testing.T) { + app := integrationsupport.NewBasicTestApp() + + txn := app.StartTransaction(t.Name()) + + ctx := newrelic.NewContext(context.Background(), txn) + batch := &pgx.Batch{} + _ = batch.Queue("INSERT INTO mytable(name) VALUES ($1)", "name a") + _ = batch.Queue("INSERT INTO mytable(name) VALUES ($1)", "name b") + _ = batch.Queue("INSERT INTO mytable(name) VALUES ($1)", "name c") + _ = batch.Queue("SELECT id FROM mytable ORDER by id DESC LIMIT 1") + result := conn.SendBatch(ctx, batch) + + _ = result.Close() + + txn.End() + + app.ExpectMetricsPresent(t, []internal.WantMetric{ + {Name: "Datastore/instance/Postgres/" + getDBHostname() + "/" + tracer.BaseSegment.PortPathOrID}, + {Name: "Datastore/operation/Postgres/batch"}, + }) + }) +} + +func TestTracer_inPool(t *testing.T) { + snap := pgsnap.NewSnap(t, os.Getenv("PGSNAP_DB_URL")) + defer snap.Finish() + + cfg, _ := pgxpool.ParseConfig(snap.Addr()) + cfg.ConnConfig.Tracer = NewTracer() + + u, _ := url.Parse(snap.Addr()) + + con, _ := pgxpool.NewWithConfig(context.Background(), cfg) + + tests := []struct { + name string + fn func(context.Context, *pgxpool.Pool) + metric []internal.WantMetric + }{ + { + name: "query should send the metric after the row close", + fn: func(ctx context.Context, con *pgxpool.Pool) { + rows, _ := con.Query(ctx, "SELECT id, name, timestamp FROM mytable LIMIT $1", 2) + rows.Close() + }, + metric: []internal.WantMetric{ + {Name: "Datastore/operation/Postgres/select"}, + {Name: "Datastore/statement/Postgres/mytable/select"}, + }, + }, + { + name: "queryrow should send the metric after scan", + fn: func(ctx context.Context, con *pgxpool.Pool) { + row := con.QueryRow(ctx, "SELECT id, name, timestamp FROM mytable") + _ = row.Scan() + }, + metric: []internal.WantMetric{ + {Name: "Datastore/operation/Postgres/select"}, + {Name: "Datastore/statement/Postgres/mytable/select"}, + }, + }, + { + name: "insert should send the metric", + fn: func(ctx context.Context, con *pgxpool.Pool) { + _, _ = con.Exec(ctx, "INSERT INTO mytable(name) VALUES ($1)", "myname is") + }, + metric: []internal.WantMetric{ + {Name: "Datastore/operation/Postgres/insert"}, + {Name: "Datastore/statement/Postgres/mytable/insert"}, + }, + }, + { + name: "update should send the metric", + fn: func(ctx context.Context, con *pgxpool.Pool) { + _, _ = con.Exec(ctx, "UPDATE mytable set name = $2 WHERE id = $1", 1, "myname is") + }, + metric: []internal.WantMetric{ + {Name: "Datastore/operation/Postgres/update"}, + {Name: "Datastore/statement/Postgres/mytable/update"}, + }, + }, + 
{ + name: "delete should send the metric", + fn: func(ctx context.Context, con *pgxpool.Pool) { + _, _ = con.Exec(ctx, "DELETE FROM mytable WHERE id = $1", 4) + }, + metric: []internal.WantMetric{ + {Name: "Datastore/operation/Postgres/delete"}, + {Name: "Datastore/statement/Postgres/mytable/delete"}, + }, + }, + { + name: "select 1 should send the metric", + fn: func(ctx context.Context, con *pgxpool.Pool) { + _, _ = con.Exec(ctx, "SELECT 1") + }, + metric: []internal.WantMetric{ + {Name: "Datastore/operation/Postgres/select"}, + }, + }, + { + name: "metric should send the metric database instance", + fn: func(ctx context.Context, con *pgxpool.Pool) { + _, _ = con.Exec(ctx, "SELECT 1") + }, + metric: []internal.WantMetric{ + {Name: "Datastore/instance/Postgres/" + getDBHostname() + "/" + u.Port()}, + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + app := integrationsupport.NewBasicTestApp() + txn := app.StartTransaction(t.Name()) + ctx := newrelic.NewContext(context.Background(), txn) + + tt.fn(ctx, con) + + txn.End() + app.ExpectMetricsPresent(t, tt.metric) + }) + } +} + +func getTestCon(t testing.TB) (*pgx.Conn, func()) { + snap := pgsnap.NewSnap(t, os.Getenv("PGSNAP_DB_URL")) + + cfg, _ := pgx.ParseConfig(snap.Addr()) + cfg.Tracer = NewTracer() + + con, _ := pgx.ConnectConfig(context.Background(), cfg) + + return con, func() { + _ = con.Close(context.Background()) + snap.Finish() + } +} + +// getDBHostname that should be localhost or local hostname +// becase the db is listen in local +func getDBHostname() string { + h, err := os.Hostname() + if err != nil { + return "127.0.0.1" + } + + return h +} diff --git a/v3/integrations/nrpgx5/pgsnap_tracer_batch.txt b/v3/integrations/nrpgx5/pgsnap_tracer_batch.txt new file mode 100644 index 000000000..24655bde6 --- /dev/null +++ b/v3/integrations/nrpgx5/pgsnap_tracer_batch.txt @@ -0,0 +1,40 @@ +F {"Type":"Parse","Name":"stmtcache_9","Query":"INSERT INTO mytable(name) VALUES ($1)","ParameterOIDs":null} +F {"Type":"Describe","ObjectType":"S","Name":"stmtcache_9"} +F {"Type":"Parse","Name":"stmtcache_10","Query":"SELECT id FROM mytable ORDER by id DESC LIMIT 1","ParameterOIDs":null} +F {"Type":"Describe","ObjectType":"S","Name":"stmtcache_10"} +F {"Type":"Sync"} +B {"Type":"ParseComplete"} +B {"Type":"ParameterDescription","ParameterOIDs":[1043]} +B {"Type":"NoData"} +B {"Type":"ParseComplete"} +B {"Type":"ParameterDescription","ParameterOIDs":[]} +B {"Type":"RowDescription","Fields":[{"Name":"id","TableOID":16551,"TableAttributeNumber":1,"DataTypeOID":23,"DataTypeSize":4,"TypeModifier":-1,"Format":0}]} +B {"Type":"ReadyForQuery","TxStatus":"I"} +F {"Type":"Bind","DestinationPortal":"","PreparedStatement":"stmtcache_9","ParameterFormatCodes":[0],"Parameters":[{"text":"name a"}],"ResultFormatCodes":[]} +F {"Type":"Describe","ObjectType":"P","Name":""} +F {"Type":"Execute","Portal":"","MaxRows":0} +F {"Type":"Bind","DestinationPortal":"","PreparedStatement":"stmtcache_9","ParameterFormatCodes":[0],"Parameters":[{"text":"name b"}],"ResultFormatCodes":[]} +F {"Type":"Describe","ObjectType":"P","Name":""} +F {"Type":"Execute","Portal":"","MaxRows":0} +F {"Type":"Bind","DestinationPortal":"","PreparedStatement":"stmtcache_9","ParameterFormatCodes":[0],"Parameters":[{"text":"name c"}],"ResultFormatCodes":[]} +F {"Type":"Describe","ObjectType":"P","Name":""} +F {"Type":"Execute","Portal":"","MaxRows":0} +F 
{"Type":"Bind","DestinationPortal":"","PreparedStatement":"stmtcache_10","ParameterFormatCodes":null,"Parameters":[],"ResultFormatCodes":[1]} +F {"Type":"Describe","ObjectType":"P","Name":""} +F {"Type":"Execute","Portal":"","MaxRows":0} +F {"Type":"Sync"} +B {"Type":"BindComplete"} +B {"Type":"NoData"} +B {"Type":"CommandComplete","CommandTag":"INSERT 0 1"} +B {"Type":"BindComplete"} +B {"Type":"NoData"} +B {"Type":"CommandComplete","CommandTag":"INSERT 0 1"} +B {"Type":"BindComplete"} +B {"Type":"NoData"} +B {"Type":"CommandComplete","CommandTag":"INSERT 0 1"} +B {"Type":"BindComplete"} +B {"Type":"RowDescription","Fields":[{"Name":"id","TableOID":16551,"TableAttributeNumber":1,"DataTypeOID":23,"DataTypeSize":4,"TypeModifier":-1,"Format":1}]} +B {"Type":"DataRow","Values":[{"binary":"00000008"}]} +B {"Type":"CommandComplete","CommandTag":"SELECT 1"} +B {"Type":"ReadyForQuery","TxStatus":"I"} +F {"Type":"Terminate"} diff --git a/v3/integrations/nrpgx5/pgsnap_tracer_connect.txt b/v3/integrations/nrpgx5/pgsnap_tracer_connect.txt new file mode 100644 index 000000000..6b13359cd --- /dev/null +++ b/v3/integrations/nrpgx5/pgsnap_tracer_connect.txt @@ -0,0 +1,16 @@ +F {"Type":"Parse","Name":"stmtcache_8","Query":"INSERT INTO mytable(name) VALUES ($1)","ParameterOIDs":null} +F {"Type":"Describe","ObjectType":"S","Name":"stmtcache_8"} +F {"Type":"Sync"} +B {"Type":"ParseComplete"} +B {"Type":"ParameterDescription","ParameterOIDs":[1043]} +B {"Type":"NoData"} +B {"Type":"ReadyForQuery","TxStatus":"I"} +F {"Type":"Bind","DestinationPortal":"","PreparedStatement":"stmtcache_8","ParameterFormatCodes":[0],"Parameters":[{"text":"myname is"}],"ResultFormatCodes":[]} +F {"Type":"Describe","ObjectType":"P","Name":""} +F {"Type":"Execute","Portal":"","MaxRows":0} +F {"Type":"Sync"} +B {"Type":"BindComplete"} +B {"Type":"NoData"} +B {"Type":"CommandComplete","CommandTag":"INSERT 0 1"} +B {"Type":"ReadyForQuery","TxStatus":"I"} +F {"Type":"Terminate"} diff --git a/v3/integrations/nrpgx5/pgsnap_tracer_inpool.txt b/v3/integrations/nrpgx5/pgsnap_tracer_inpool.txt new file mode 100644 index 000000000..e3bf80ec2 --- /dev/null +++ b/v3/integrations/nrpgx5/pgsnap_tracer_inpool.txt @@ -0,0 +1,94 @@ +F {"Type":"Parse","Name":"stmtcache_11","Query":"SELECT id, name, timestamp FROM mytable LIMIT $1","ParameterOIDs":null} +F {"Type":"Describe","ObjectType":"S","Name":"stmtcache_11"} +F {"Type":"Sync"} +B {"Type":"ParseComplete"} +B {"Type":"ParameterDescription","ParameterOIDs":[20]} +B {"Type":"RowDescription","Fields":[{"Name":"id","TableOID":16551,"TableAttributeNumber":1,"DataTypeOID":23,"DataTypeSize":4,"TypeModifier":-1,"Format":0},{"Name":"name","TableOID":16551,"TableAttributeNumber":2,"DataTypeOID":1043,"DataTypeSize":-1,"TypeModifier":-1,"Format":0},{"Name":"timestamp","TableOID":16551,"TableAttributeNumber":3,"DataTypeOID":1184,"DataTypeSize":8,"TypeModifier":-1,"Format":0}]} +B {"Type":"ReadyForQuery","TxStatus":"I"} +F {"Type":"Bind","DestinationPortal":"","PreparedStatement":"stmtcache_11","ParameterFormatCodes":[1],"Parameters":[{"binary":"0000000000000002"}],"ResultFormatCodes":[1,0,1]} +F {"Type":"Describe","ObjectType":"P","Name":""} +F {"Type":"Execute","Portal":"","MaxRows":0} +F {"Type":"Sync"} +B {"Type":"BindComplete"} +B 
{"Type":"RowDescription","Fields":[{"Name":"id","TableOID":16551,"TableAttributeNumber":1,"DataTypeOID":23,"DataTypeSize":4,"TypeModifier":-1,"Format":1},{"Name":"name","TableOID":16551,"TableAttributeNumber":2,"DataTypeOID":1043,"DataTypeSize":-1,"TypeModifier":-1,"Format":0},{"Name":"timestamp","TableOID":16551,"TableAttributeNumber":3,"DataTypeOID":1184,"DataTypeSize":8,"TypeModifier":-1,"Format":1}]} +B {"Type":"DataRow","Values":[{"binary":"00000002"},{"text":"Magdalena"},{"binary":"00028ec50f7a0c27"}]} +B {"Type":"DataRow","Values":[{"binary":"00000003"},{"text":"Someone"},{"binary":"00028ec50f7a0c27"}]} +B {"Type":"CommandComplete","CommandTag":"SELECT 2"} +B {"Type":"ReadyForQuery","TxStatus":"I"} +F {"Type":"Parse","Name":"stmtcache_12","Query":"SELECT id, name, timestamp FROM mytable","ParameterOIDs":null} +F {"Type":"Describe","ObjectType":"S","Name":"stmtcache_12"} +F {"Type":"Sync"} +B {"Type":"ParseComplete"} +B {"Type":"ParameterDescription","ParameterOIDs":[]} +B {"Type":"RowDescription","Fields":[{"Name":"id","TableOID":16551,"TableAttributeNumber":1,"DataTypeOID":23,"DataTypeSize":4,"TypeModifier":-1,"Format":0},{"Name":"name","TableOID":16551,"TableAttributeNumber":2,"DataTypeOID":1043,"DataTypeSize":-1,"TypeModifier":-1,"Format":0},{"Name":"timestamp","TableOID":16551,"TableAttributeNumber":3,"DataTypeOID":1184,"DataTypeSize":8,"TypeModifier":-1,"Format":0}]} +B {"Type":"ReadyForQuery","TxStatus":"I"} +F {"Type":"Bind","DestinationPortal":"","PreparedStatement":"stmtcache_12","ParameterFormatCodes":null,"Parameters":[],"ResultFormatCodes":[1,0,1]} +F {"Type":"Describe","ObjectType":"P","Name":""} +F {"Type":"Execute","Portal":"","MaxRows":0} +F {"Type":"Sync"} +B {"Type":"BindComplete"} +B {"Type":"RowDescription","Fields":[{"Name":"id","TableOID":16551,"TableAttributeNumber":1,"DataTypeOID":23,"DataTypeSize":4,"TypeModifier":-1,"Format":1},{"Name":"name","TableOID":16551,"TableAttributeNumber":2,"DataTypeOID":1043,"DataTypeSize":-1,"TypeModifier":-1,"Format":0},{"Name":"timestamp","TableOID":16551,"TableAttributeNumber":3,"DataTypeOID":1184,"DataTypeSize":8,"TypeModifier":-1,"Format":1}]} +B {"Type":"DataRow","Values":[{"binary":"00000002"},{"text":"Magdalena"},{"binary":"00028ec50f7a0c27"}]} +B {"Type":"DataRow","Values":[{"binary":"00000003"},{"text":"Someone"},{"binary":"00028ec50f7a0c27"}]} +B {"Type":"DataRow","Values":[{"binary":"00000001"},{"text":"myname is"},{"binary":"00028ec50f7a0c27"}]} +B {"Type":"DataRow","Values":[{"binary":"00000005"},{"text":"myname is"},{"binary":"00028ec50fdbabf2"}]} +B {"Type":"DataRow","Values":[{"binary":"00000006"},{"text":"name a"},{"binary":"00028ec50fdbc3b3"}]} +B {"Type":"DataRow","Values":[{"binary":"00000007"},{"text":"name b"},{"binary":"00028ec50fdbc3b3"}]} +B {"Type":"DataRow","Values":[{"binary":"00000008"},{"text":"name c"},{"binary":"00028ec50fdbc3b3"}]} +B {"Type":"CommandComplete","CommandTag":"SELECT 7"} +B {"Type":"ReadyForQuery","TxStatus":"I"} +F {"Type":"Parse","Name":"stmtcache_13","Query":"INSERT INTO mytable(name) VALUES ($1)","ParameterOIDs":null} +F {"Type":"Describe","ObjectType":"S","Name":"stmtcache_13"} +F {"Type":"Sync"} +B {"Type":"ParseComplete"} +B {"Type":"ParameterDescription","ParameterOIDs":[1043]} +B {"Type":"NoData"} +B {"Type":"ReadyForQuery","TxStatus":"I"} +F {"Type":"Bind","DestinationPortal":"","PreparedStatement":"stmtcache_13","ParameterFormatCodes":[0],"Parameters":[{"text":"myname is"}],"ResultFormatCodes":[]} +F {"Type":"Describe","ObjectType":"P","Name":""} +F 
{"Type":"Execute","Portal":"","MaxRows":0} +F {"Type":"Sync"} +B {"Type":"BindComplete"} +B {"Type":"NoData"} +B {"Type":"CommandComplete","CommandTag":"INSERT 0 1"} +B {"Type":"ReadyForQuery","TxStatus":"I"} +F {"Type":"Parse","Name":"stmtcache_14","Query":"UPDATE mytable set name = $2 WHERE id = $1","ParameterOIDs":null} +F {"Type":"Describe","ObjectType":"S","Name":"stmtcache_14"} +F {"Type":"Sync"} +B {"Type":"ParseComplete"} +B {"Type":"ParameterDescription","ParameterOIDs":[23,1043]} +B {"Type":"NoData"} +B {"Type":"ReadyForQuery","TxStatus":"I"} +F {"Type":"Bind","DestinationPortal":"","PreparedStatement":"stmtcache_14","ParameterFormatCodes":[1,0],"Parameters":[{"binary":"00000001"},{"text":"myname is"}],"ResultFormatCodes":[]} +F {"Type":"Describe","ObjectType":"P","Name":""} +F {"Type":"Execute","Portal":"","MaxRows":0} +F {"Type":"Sync"} +B {"Type":"BindComplete"} +B {"Type":"NoData"} +B {"Type":"CommandComplete","CommandTag":"UPDATE 1"} +B {"Type":"ReadyForQuery","TxStatus":"I"} +F {"Type":"Parse","Name":"stmtcache_15","Query":"DELETE FROM mytable WHERE id = $1","ParameterOIDs":null} +F {"Type":"Describe","ObjectType":"S","Name":"stmtcache_15"} +F {"Type":"Sync"} +B {"Type":"ParseComplete"} +B {"Type":"ParameterDescription","ParameterOIDs":[23]} +B {"Type":"NoData"} +B {"Type":"ReadyForQuery","TxStatus":"I"} +F {"Type":"Bind","DestinationPortal":"","PreparedStatement":"stmtcache_15","ParameterFormatCodes":[1],"Parameters":[{"binary":"00000004"}],"ResultFormatCodes":[]} +F {"Type":"Describe","ObjectType":"P","Name":""} +F {"Type":"Execute","Portal":"","MaxRows":0} +F {"Type":"Sync"} +B {"Type":"BindComplete"} +B {"Type":"NoData"} +B {"Type":"CommandComplete","CommandTag":"DELETE 0"} +B {"Type":"ReadyForQuery","TxStatus":"I"} +F {"Type":"Query","String":"SELECT 1"} +B {"Type":"RowDescription","Fields":[{"Name":"?column?","TableOID":0,"TableAttributeNumber":0,"DataTypeOID":23,"DataTypeSize":4,"TypeModifier":-1,"Format":0}]} +B {"Type":"DataRow","Values":[{"text":"1"}]} +B {"Type":"CommandComplete","CommandTag":"SELECT 1"} +B {"Type":"ReadyForQuery","TxStatus":"I"} +F {"Type":"Query","String":"SELECT 1"} +B {"Type":"RowDescription","Fields":[{"Name":"?column?","TableOID":0,"TableAttributeNumber":0,"DataTypeOID":23,"DataTypeSize":4,"TypeModifier":-1,"Format":0}]} +B {"Type":"DataRow","Values":[{"text":"1"}]} +B {"Type":"CommandComplete","CommandTag":"SELECT 1"} +B {"Type":"ReadyForQuery","TxStatus":"I"} diff --git a/v3/integrations/nrpgx5/pgsnap_tracer_trace_crud.txt b/v3/integrations/nrpgx5/pgsnap_tracer_trace_crud.txt new file mode 100644 index 000000000..41e8f50ba --- /dev/null +++ b/v3/integrations/nrpgx5/pgsnap_tracer_trace_crud.txt @@ -0,0 +1,96 @@ +F {"Type":"Parse","Name":"stmtcache_1","Query":"SELECT id, name, timestamp FROM mytable LIMIT $1","ParameterOIDs":null} +F {"Type":"Describe","ObjectType":"S","Name":"stmtcache_1"} +F {"Type":"Sync"} +B {"Type":"ParseComplete"} +B {"Type":"ParameterDescription","ParameterOIDs":[20]} +B {"Type":"RowDescription","Fields":[{"Name":"id","TableOID":16551,"TableAttributeNumber":1,"DataTypeOID":23,"DataTypeSize":4,"TypeModifier":-1,"Format":0},{"Name":"name","TableOID":16551,"TableAttributeNumber":2,"DataTypeOID":1043,"DataTypeSize":-1,"TypeModifier":-1,"Format":0},{"Name":"timestamp","TableOID":16551,"TableAttributeNumber":3,"DataTypeOID":1184,"DataTypeSize":8,"TypeModifier":-1,"Format":0}]} +B {"Type":"ReadyForQuery","TxStatus":"I"} +F 
{"Type":"Bind","DestinationPortal":"","PreparedStatement":"stmtcache_1","ParameterFormatCodes":[1],"Parameters":[{"binary":"0000000000000002"}],"ResultFormatCodes":[1,0,1]} +F {"Type":"Describe","ObjectType":"P","Name":""} +F {"Type":"Execute","Portal":"","MaxRows":0} +F {"Type":"Sync"} +B {"Type":"BindComplete"} +B {"Type":"RowDescription","Fields":[{"Name":"id","TableOID":16551,"TableAttributeNumber":1,"DataTypeOID":23,"DataTypeSize":4,"TypeModifier":-1,"Format":1},{"Name":"name","TableOID":16551,"TableAttributeNumber":2,"DataTypeOID":1043,"DataTypeSize":-1,"TypeModifier":-1,"Format":0},{"Name":"timestamp","TableOID":16551,"TableAttributeNumber":3,"DataTypeOID":1184,"DataTypeSize":8,"TypeModifier":-1,"Format":1}]} +B {"Type":"DataRow","Values":[{"binary":"00000001"},{"text":"Adrian"},{"binary":"00028ec50f7a0c27"}]} +B {"Type":"DataRow","Values":[{"binary":"00000002"},{"text":"Magdalena"},{"binary":"00028ec50f7a0c27"}]} +B {"Type":"CommandComplete","CommandTag":"SELECT 2"} +B {"Type":"ReadyForQuery","TxStatus":"I"} +F {"Type":"Parse","Name":"stmtcache_2","Query":"SELECT id, name, timestamp FROM mytable","ParameterOIDs":null} +F {"Type":"Describe","ObjectType":"S","Name":"stmtcache_2"} +F {"Type":"Sync"} +B {"Type":"ParseComplete"} +B {"Type":"ParameterDescription","ParameterOIDs":[]} +B {"Type":"RowDescription","Fields":[{"Name":"id","TableOID":16551,"TableAttributeNumber":1,"DataTypeOID":23,"DataTypeSize":4,"TypeModifier":-1,"Format":0},{"Name":"name","TableOID":16551,"TableAttributeNumber":2,"DataTypeOID":1043,"DataTypeSize":-1,"TypeModifier":-1,"Format":0},{"Name":"timestamp","TableOID":16551,"TableAttributeNumber":3,"DataTypeOID":1184,"DataTypeSize":8,"TypeModifier":-1,"Format":0}]} +B {"Type":"ReadyForQuery","TxStatus":"I"} +F {"Type":"Bind","DestinationPortal":"","PreparedStatement":"stmtcache_2","ParameterFormatCodes":null,"Parameters":[],"ResultFormatCodes":[1,0,1]} +F {"Type":"Describe","ObjectType":"P","Name":""} +F {"Type":"Execute","Portal":"","MaxRows":0} +F {"Type":"Sync"} +B {"Type":"BindComplete"} +B {"Type":"RowDescription","Fields":[{"Name":"id","TableOID":16551,"TableAttributeNumber":1,"DataTypeOID":23,"DataTypeSize":4,"TypeModifier":-1,"Format":1},{"Name":"name","TableOID":16551,"TableAttributeNumber":2,"DataTypeOID":1043,"DataTypeSize":-1,"TypeModifier":-1,"Format":0},{"Name":"timestamp","TableOID":16551,"TableAttributeNumber":3,"DataTypeOID":1184,"DataTypeSize":8,"TypeModifier":-1,"Format":1}]} +B {"Type":"DataRow","Values":[{"binary":"00000001"},{"text":"Adrian"},{"binary":"00028ec50f7a0c27"}]} +B {"Type":"DataRow","Values":[{"binary":"00000002"},{"text":"Magdalena"},{"binary":"00028ec50f7a0c27"}]} +B {"Type":"DataRow","Values":[{"binary":"00000003"},{"text":"Someone"},{"binary":"00028ec50f7a0c27"}]} +B {"Type":"CommandComplete","CommandTag":"SELECT 3"} +B {"Type":"ReadyForQuery","TxStatus":"I"} +F {"Type":"Parse","Name":"stmtcache_3","Query":"INSERT INTO mytable(name) VALUES ($1)","ParameterOIDs":null} +F {"Type":"Describe","ObjectType":"S","Name":"stmtcache_3"} +F {"Type":"Sync"} +B {"Type":"ParseComplete"} +B {"Type":"ParameterDescription","ParameterOIDs":[1043]} +B {"Type":"NoData"} +B {"Type":"ReadyForQuery","TxStatus":"I"} +F {"Type":"Bind","DestinationPortal":"","PreparedStatement":"stmtcache_3","ParameterFormatCodes":[0],"Parameters":[{"text":"myname is"}],"ResultFormatCodes":[]} +F {"Type":"Describe","ObjectType":"P","Name":""} +F {"Type":"Execute","Portal":"","MaxRows":0} +F {"Type":"Sync"} +B {"Type":"BindComplete"} +B {"Type":"NoData"} +B 
{"Type":"CommandComplete","CommandTag":"INSERT 0 1"} +B {"Type":"ReadyForQuery","TxStatus":"I"} +F {"Type":"Parse","Name":"stmtcache_4","Query":"UPDATE mytable set name = $2 WHERE id = $1","ParameterOIDs":null} +F {"Type":"Describe","ObjectType":"S","Name":"stmtcache_4"} +F {"Type":"Sync"} +B {"Type":"ParseComplete"} +B {"Type":"ParameterDescription","ParameterOIDs":[23,1043]} +B {"Type":"NoData"} +B {"Type":"ReadyForQuery","TxStatus":"I"} +F {"Type":"Bind","DestinationPortal":"","PreparedStatement":"stmtcache_4","ParameterFormatCodes":[1,0],"Parameters":[{"binary":"00000001"},{"text":"myname is"}],"ResultFormatCodes":[]} +F {"Type":"Describe","ObjectType":"P","Name":""} +F {"Type":"Execute","Portal":"","MaxRows":0} +F {"Type":"Sync"} +B {"Type":"BindComplete"} +B {"Type":"NoData"} +B {"Type":"CommandComplete","CommandTag":"UPDATE 1"} +B {"Type":"ReadyForQuery","TxStatus":"I"} +F {"Type":"Parse","Name":"stmtcache_5","Query":"DELETE FROM mytable WHERE id = $1","ParameterOIDs":null} +F {"Type":"Describe","ObjectType":"S","Name":"stmtcache_5"} +F {"Type":"Sync"} +B {"Type":"ParseComplete"} +B {"Type":"ParameterDescription","ParameterOIDs":[23]} +B {"Type":"NoData"} +B {"Type":"ReadyForQuery","TxStatus":"I"} +F {"Type":"Bind","DestinationPortal":"","PreparedStatement":"stmtcache_5","ParameterFormatCodes":[1],"Parameters":[{"binary":"00000004"}],"ResultFormatCodes":[]} +F {"Type":"Describe","ObjectType":"P","Name":""} +F {"Type":"Execute","Portal":"","MaxRows":0} +F {"Type":"Sync"} +B {"Type":"BindComplete"} +B {"Type":"NoData"} +B {"Type":"CommandComplete","CommandTag":"DELETE 1"} +B {"Type":"ReadyForQuery","TxStatus":"I"} +F {"Type":"Query","String":"SELECT 1"} +B {"Type":"RowDescription","Fields":[{"Name":"?column?","TableOID":0,"TableAttributeNumber":0,"DataTypeOID":23,"DataTypeSize":4,"TypeModifier":-1,"Format":0}]} +B {"Type":"DataRow","Values":[{"text":"1"}]} +B {"Type":"CommandComplete","CommandTag":"SELECT 1"} +B {"Type":"ReadyForQuery","TxStatus":"I"} +F {"Type":"Parse","Name":"stmtcache_6","Query":"SELECT * FROM non_existent_table","ParameterOIDs":null} +F {"Type":"Describe","ObjectType":"S","Name":"stmtcache_6"} +F {"Type":"Sync"} +B {"Type":"ErrorResponse","Severity":"ERROR","SeverityUnlocalized":"ERROR","Code":"42P01","Message":"relation \"non_existent_table\" does not exist","Detail":"","Hint":"","Position":15,"InternalPosition":0,"InternalQuery":"","Where":"","SchemaName":"","TableName":"","ColumnName":"","DataTypeName":"","ConstraintName":"","File":"parse_relation.c","Line":1384,"Routine":"parserOpenTable","UnknownFields":null} +B {"Type":"ReadyForQuery","TxStatus":"I"} +F {"Type":"Parse","Name":"stmtcache_7","Query":"INSERT INTO non_existent_table(name) VALUES ($1)","ParameterOIDs":null} +F {"Type":"Describe","ObjectType":"S","Name":"stmtcache_7"} +F {"Type":"Sync"} +B {"Type":"ErrorResponse","Severity":"ERROR","SeverityUnlocalized":"ERROR","Code":"42P01","Message":"relation \"non_existent_table\" does not exist","Detail":"","Hint":"","Position":13,"InternalPosition":0,"InternalQuery":"","Where":"","SchemaName":"","TableName":"","ColumnName":"","DataTypeName":"","ConstraintName":"","File":"parse_relation.c","Line":1384,"Routine":"parserOpenTable","UnknownFields":null} +B {"Type":"ReadyForQuery","TxStatus":"I"} +F {"Type":"Terminate"} From ea8ed9127b930665cc1062d704930252b5418427 Mon Sep 17 00:00:00 2001 From: Egon Firman Date: Wed, 2 Nov 2022 22:26:11 +0700 Subject: [PATCH 2/6] adding examples --- v3/integrations/nrpgx5/example/pgx/main.go | 53 +++++++++++++++++++ 
.../nrpgx5/example/pgxpool/main.go | 53 +++++++++++++++++++ v3/integrations/nrpgx5/nrpgx5.go | 33 ++++++------ 3 files changed, 122 insertions(+), 17 deletions(-) create mode 100644 v3/integrations/nrpgx5/example/pgx/main.go create mode 100644 v3/integrations/nrpgx5/example/pgxpool/main.go diff --git a/v3/integrations/nrpgx5/example/pgx/main.go b/v3/integrations/nrpgx5/example/pgx/main.go new file mode 100644 index 000000000..758af98bb --- /dev/null +++ b/v3/integrations/nrpgx5/example/pgx/main.go @@ -0,0 +1,53 @@ +package main + +import ( + "context" + "fmt" + "log" + "os" + "time" + + "github.com/jackc/pgx/v5" + "github.com/newrelic/go-agent/v3/integrations/nrpgx5" + "github.com/newrelic/go-agent/v3/newrelic" +) + +func main() { + cfg, err := pgx.ParseConfig("postgres://postgres:postgres@localhost:5432") + if err != nil { + panic(err) + } + + cfg.Tracer = nrpgx5.NewTracer() + conn, err := pgx.ConnectConfig(context.Background(), cfg) + if err != nil { + panic(err) + } + + app, err := newrelic.NewApplication( + newrelic.ConfigAppName("PostgreSQL App"), + newrelic.ConfigLicense(os.Getenv("NEW_RELIC_LICENSE_KEY")), + newrelic.ConfigDebugLogger(os.Stdout), + ) + if err != nil { + panic(err) + } + // + // N.B.: We do not recommend using app.WaitForConnection in production code. + // + app.WaitForConnection(5 * time.Second) + txn := app.StartTransaction("postgresQuery") + + ctx := newrelic.NewContext(context.Background(), txn) + row := conn.QueryRow(ctx, "SELECT count(*) FROM pg_catalog.pg_tables") + count := 0 + err = row.Scan(&count) + if err != nil { + log.Println(err) + } + + txn.End() + app.Shutdown(5 * time.Second) + + fmt.Println("number of entries in pg_catalog.pg_tables", count) +} diff --git a/v3/integrations/nrpgx5/example/pgxpool/main.go b/v3/integrations/nrpgx5/example/pgxpool/main.go new file mode 100644 index 000000000..8457a478d --- /dev/null +++ b/v3/integrations/nrpgx5/example/pgxpool/main.go @@ -0,0 +1,53 @@ +package main + +import ( + "context" + "fmt" + "log" + "os" + "time" + + "github.com/jackc/pgx/v5/pgxpool" + "github.com/newrelic/go-agent/v3/integrations/nrpgx5" + "github.com/newrelic/go-agent/v3/newrelic" +) + +func main() { + cfg, err := pgxpool.ParseConfig("postgres://postgres:postgres@localhost:5432") + if err != nil { + panic(err) + } + + cfg.ConnConfig.Tracer = nrpgx5.NewTracer() + db, err := pgxpool.NewWithConfig(context.Background(), cfg) + if err != nil { + panic(err) + } + + app, err := newrelic.NewApplication( + newrelic.ConfigAppName("PostgreSQL App"), + newrelic.ConfigLicense(os.Getenv("NEW_RELIC_LICENSE_KEY")), + newrelic.ConfigDebugLogger(os.Stdout), + ) + if err != nil { + panic(err) + } + // + // N.B.: We do not recommend using app.WaitForConnection in production code. 
+ // + app.WaitForConnection(5 * time.Second) + txn := app.StartTransaction("postgresQuery") + + ctx := newrelic.NewContext(context.Background(), txn) + row := db.QueryRow(ctx, "SELECT count(*) FROM pg_catalog.pg_tables") + count := 0 + err = row.Scan(&count) + if err != nil { + log.Println(err) + } + + txn.End() + app.Shutdown(5 * time.Second) + + fmt.Println("number of entries in pg_catalog.pg_tables", count) +} diff --git a/v3/integrations/nrpgx5/nrpgx5.go b/v3/integrations/nrpgx5/nrpgx5.go index 6e1c833da..da3f41265 100644 --- a/v3/integrations/nrpgx5/nrpgx5.go +++ b/v3/integrations/nrpgx5/nrpgx5.go @@ -10,45 +10,44 @@ // you can set the tracer in the pgx.Config like this // ```go // import ( -// "context" // "github.com/jackc/pgx/v5" // "github.com/newrelic/go-agent/v3/integrations/nrpgx5" +// "github.com/newrelic/go-agent/v3/newrelic" // ) // // func main() { -// config, err := pgx.ParseConfig("postgres://user:password@localhost:5432/database") +// cfg, err := pgx.ParseConfig("postgres://postgres:postgres@localhost:5432") // if err != nil { // panic(err) -// } +// } // -// cfg.Tracer = nrpgx5.NewTracer() -// conn, err := pgx.ConnectConfig(context.Background(), cfg) -// if err != nil { -// panic(err) +// cfg.Tracer = nrpgx5.NewTracer() +// conn, err := pgx.ConnectConfig(context.Background(), cfg) +// if err != nil { +// panic(err) // } -// } +// ... // ``` // or you can set the tracer in the pgxpool.Config like this // ```go // import ( -// "context" -// "github.com/jackc/pgx/v5" // "github.com/jackc/pgx/v5/pgxpool" // "github.com/newrelic/go-agent/v3/integrations/nrpgx5" +// "github.com/newrelic/go-agent/v3/newrelic" // ) // // func main() { -// config, err := pgxpool.ParseConfig("postgres://user:password@localhost:5432/database") +// cfg, err := pgxpool.ParseConfig("postgres://postgres:postgres@localhost:5432") // if err != nil { // panic(err) -// } +// } // -// cfg.ConnConfig.Tracer = nrpgx5.NewTracer() -// conn, err := pgxpool.ConnectConfig(context.Background(), cfg) -// if err != nil { -// panic(err) +// cfg.ConnConfig.Tracer = nrpgx5.NewTracer() +// db, err := pgxpool.NewWithConfig(context.Background(), cfg) +// if err != nil { +// panic(err) // } -// } +// ... 
// ``` package nrpgx5 From e1ee0c84dc943469f5073cb1d402a029e0eb0d97 Mon Sep 17 00:00:00 2001 From: Emilio Garcia Date: Mon, 14 Nov 2022 14:51:20 -0500 Subject: [PATCH 3/6] prep nrpgx5 pr for release --- .github/workflows/ci.yaml | 3 +++ v3/integrations/nrpgx5/go.mod | 2 +- 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index e98575f41..3bd4e4863 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -188,6 +188,9 @@ jobs: - go-version: 1.17.x dirs: v3/integrations/nrpq extratesting: go get -u github.com/lib/pq@master + - go-version: 1.17.x + dirs: v3/integrations/nrpgx5 + extratesting: go get -u github.com/jackc/pgx/v5@master - go-version: 1.17.x dirs: v3/integrations/nrpq/example/sqlx - go-version: 1.17.x diff --git a/v3/integrations/nrpgx5/go.mod b/v3/integrations/nrpgx5/go.mod index 920f56b2f..5e8329cfc 100644 --- a/v3/integrations/nrpgx5/go.mod +++ b/v3/integrations/nrpgx5/go.mod @@ -6,7 +6,7 @@ require ( github.com/egon12/pgsnap v0.0.0-20221022154027-2847f0124ed8 github.com/jackc/pgx/v4 v4.17.2 // indirect github.com/jackc/pgx/v5 v5.0.3 - github.com/newrelic/go-agent/v3 v3.3.0 + github.com/newrelic/go-agent/v3 v3.20.0 github.com/stretchr/testify v1.8.0 golang.org/x/crypto v0.1.0 // indirect ) From 34f6cdf25f57a84fa9938b51a7822ba556a636d6 Mon Sep 17 00:00:00 2001 From: Emilio Garcia Date: Mon, 14 Nov 2022 14:54:51 -0500 Subject: [PATCH 4/6] nrpgx5 no extratesting due to deps issue --- .github/workflows/ci.yaml | 1 - 1 file changed, 1 deletion(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 3bd4e4863..fc06d3d15 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -190,7 +190,6 @@ jobs: extratesting: go get -u github.com/lib/pq@master - go-version: 1.17.x dirs: v3/integrations/nrpgx5 - extratesting: go get -u github.com/jackc/pgx/v5@master - go-version: 1.17.x dirs: v3/integrations/nrpq/example/sqlx - go-version: 1.17.x From 75fd2c351ad518334c4284614e4e638672d69dc6 Mon Sep 17 00:00:00 2001 From: Emilio Garcia Date: Mon, 14 Nov 2022 15:00:04 -0500 Subject: [PATCH 5/6] bump nrpgx5 to go version 1.17 to align with EOL policy --- .github/workflows/ci.yaml | 1 + v3/integrations/nrpgx5/go.mod | 4 +--- 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index fc06d3d15..3bd4e4863 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -190,6 +190,7 @@ jobs: extratesting: go get -u github.com/lib/pq@master - go-version: 1.17.x dirs: v3/integrations/nrpgx5 + extratesting: go get -u github.com/jackc/pgx/v5@master - go-version: 1.17.x dirs: v3/integrations/nrpq/example/sqlx - go-version: 1.17.x diff --git a/v3/integrations/nrpgx5/go.mod b/v3/integrations/nrpgx5/go.mod index 5e8329cfc..318391be1 100644 --- a/v3/integrations/nrpgx5/go.mod +++ b/v3/integrations/nrpgx5/go.mod @@ -1,12 +1,10 @@ module github.com/newrelic/go-agent/v3/integrations/nrpgx5 -go 1.11 +go 1.17 require ( github.com/egon12/pgsnap v0.0.0-20221022154027-2847f0124ed8 - github.com/jackc/pgx/v4 v4.17.2 // indirect github.com/jackc/pgx/v5 v5.0.3 github.com/newrelic/go-agent/v3 v3.20.0 github.com/stretchr/testify v1.8.0 - golang.org/x/crypto v0.1.0 // indirect ) From 164c417b17f80f5ce08b2b591938ea0f5081ff6b Mon Sep 17 00:00:00 2001 From: Emilio Garcia Date: Mon, 14 Nov 2022 15:08:13 -0500 Subject: [PATCH 6/6] bump nrpgx5 test to go 1.18 --- .github/workflows/ci.yaml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) 
diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 3bd4e4863..7e1ea8c19 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -188,9 +188,8 @@ jobs: - go-version: 1.17.x dirs: v3/integrations/nrpq extratesting: go get -u github.com/lib/pq@master - - go-version: 1.17.x + - go-version: 1.18.x dirs: v3/integrations/nrpgx5 - extratesting: go get -u github.com/jackc/pgx/v5@master - go-version: 1.17.x dirs: v3/integrations/nrpq/example/sqlx - go-version: 1.17.x
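For reviewers, here is a minimal sketch (not part of the patches above) of how an application could exercise the batch tracing added in PATCH 1/6. It assumes a local Postgres at `localhost:5432` with credentials `postgres:postgres` and `NEW_RELIC_LICENSE_KEY` set in the environment, mirroring the examples added in PATCH 2/6 but going through `SendBatch` so the `Datastore/operation/Postgres/batch` segment path is hit.

```go
package main

import (
	"context"
	"fmt"
	"log"
	"os"
	"time"

	"github.com/jackc/pgx/v5"
	"github.com/newrelic/go-agent/v3/integrations/nrpgx5"
	"github.com/newrelic/go-agent/v3/newrelic"
)

func main() {
	// Hypothetical local DSN; adjust for your environment.
	cfg, err := pgx.ParseConfig("postgres://postgres:postgres@localhost:5432")
	if err != nil {
		panic(err)
	}

	// Attach the tracer introduced by this patch series.
	cfg.Tracer = nrpgx5.NewTracer()
	conn, err := pgx.ConnectConfig(context.Background(), cfg)
	if err != nil {
		panic(err)
	}
	defer conn.Close(context.Background())

	app, err := newrelic.NewApplication(
		newrelic.ConfigAppName("PostgreSQL Batch App"),
		newrelic.ConfigLicense(os.Getenv("NEW_RELIC_LICENSE_KEY")),
	)
	if err != nil {
		panic(err)
	}
	// As in the bundled examples, waiting for the connection is only for demo purposes.
	app.WaitForConnection(5 * time.Second)

	txn := app.StartTransaction("postgresBatch")
	ctx := newrelic.NewContext(context.Background(), txn)

	// SendBatch is traced as a single batch segment; each queued statement's SQL
	// is appended to the segment's query text by TraceBatchQuery.
	batch := &pgx.Batch{}
	batch.Queue("SELECT count(*) FROM pg_catalog.pg_tables")
	batch.Queue("SELECT 1")

	results := conn.SendBatch(ctx, batch)
	count := 0
	if err := results.QueryRow().Scan(&count); err != nil {
		log.Println(err)
	}
	if err := results.Close(); err != nil {
		log.Println(err)
	}

	txn.End()
	app.Shutdown(5 * time.Second)

	fmt.Println("number of entries in pg_catalog.pg_tables:", count)
}
```

`TestTracer_batch` in PATCH 1/6 asserts the same `Datastore/operation/Postgres/batch` metric name, so the sketch can be sanity-checked against that test.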