Remove leftover usages of deprecated core APIs (#9390)
dmitryax authored Apr 21, 2022
1 parent 74f85d7 commit bff698a
Showing 9 changed files with 31 additions and 29 deletions.
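For orientation: every change below replaces the deprecated go.opentelemetry.io/collector/model/pdata package with the signal-specific pdata packages (pdata/pcommon, pdata/pmetric) and updates the remaining deprecated accessors, e.g. InstrumentationLibraryMetrics() becomes ScopeMetrics() and the removed LogRecord name moves into the record Body. A minimal sketch of the replacement API, hedged against the collector pdata module of roughly this vintage (exact method names vary between releases):

package main

import (
	"fmt"
	"time"

	"go.opentelemetry.io/collector/pdata/pcommon"
	"go.opentelemetry.io/collector/pdata/pmetric"
)

func main() {
	// pcommon holds the shared types (Timestamp, TraceID, Value) that used to
	// live in the deprecated model/pdata package.
	now := pcommon.NewTimestampFromTime(time.Now())

	// pmetric.Metrics replaces the old pdata.Metrics.
	md := pmetric.NewMetrics()
	sm := md.ResourceMetrics().AppendEmpty().ScopeMetrics().AppendEmpty() // formerly InstrumentationLibraryMetrics()
	m := sm.Metrics().AppendEmpty()
	m.SetName("example.metric")

	fmt.Println(md.MetricCount(), now.AsTime())
}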
2 changes: 1 addition & 1 deletion cmd/otelcontribcol/main_windows.go
@@ -53,7 +53,7 @@ func checkUseInteractiveMode() (bool, error) {

func runService(params service.CollectorSettings) error {
// do not need to supply service name when startup is invoked through Service Control Manager directly
- if err := svc.Run("", service.NewWindowsService(params)); err != nil {
+ if err := svc.Run("", service.NewSvcHandler(params)); err != nil {
return fmt.Errorf("failed to start collector server: %w", err)
}

24 changes: 12 additions & 12 deletions pkg/batchpersignal/batchpersignal_test.go
@@ -129,13 +129,13 @@ func TestSplitDifferentLogsIntoDifferentBatches(t *testing.T) {
library.SetName("first-library")
sl.LogRecords().EnsureCapacity(3)
firstLog := sl.LogRecords().AppendEmpty()
- firstLog.SetName("first-batch-first-log")
+ firstLog.Body().SetStringVal("first-batch-first-log")
firstLog.SetTraceID(pcommon.NewTraceID([16]byte{1, 2, 3, 4}))
secondLog := sl.LogRecords().AppendEmpty()
- secondLog.SetName("first-batch-second-log")
+ secondLog.Body().SetStringVal("first-batch-second-log")
secondLog.SetTraceID(pcommon.NewTraceID([16]byte{2, 3, 4, 5}))
thirdLog := sl.LogRecords().AppendEmpty()
- thirdLog.SetName("first-batch-third-log")
+ thirdLog.Body().SetStringVal("first-batch-third-log")
// do not set traceID for third log

// test
@@ -147,17 +147,17 @@ func TestSplitDifferentLogsIntoDifferentBatches(t *testing.T) {
// first batch
firstOutILL := out[0].ResourceLogs().At(0).ScopeLogs().At(0)
assert.Equal(t, library.Name(), firstOutILL.Scope().Name())
- assert.Equal(t, firstLog.Name(), firstOutILL.LogRecords().At(0).Name())
+ assert.Equal(t, firstLog.Body().StringVal(), firstOutILL.LogRecords().At(0).Body().StringVal())

// second batch
secondOutILL := out[1].ResourceLogs().At(0).ScopeLogs().At(0)
assert.Equal(t, library.Name(), secondOutILL.Scope().Name())
- assert.Equal(t, secondLog.Name(), secondOutILL.LogRecords().At(0).Name())
+ assert.Equal(t, secondLog.Body().StringVal(), secondOutILL.LogRecords().At(0).Body().StringVal())

// third batch
thirdOutILL := out[2].ResourceLogs().At(0).ScopeLogs().At(0)
assert.Equal(t, library.Name(), thirdOutILL.Scope().Name())
- assert.Equal(t, thirdLog.Name(), thirdOutILL.LogRecords().At(0).Name())
+ assert.Equal(t, thirdLog.Body().StringVal(), thirdOutILL.LogRecords().At(0).Body().StringVal())
}

func TestSplitLogsWithNilTraceID(t *testing.T) {
@@ -190,18 +190,18 @@ func TestSplitLogsSameTraceIntoDifferentBatches(t *testing.T) {
firstLibrary.SetName("first-library")
firstILS.LogRecords().EnsureCapacity(2)
firstLog := firstILS.LogRecords().AppendEmpty()
- firstLog.SetName("first-batch-first-log")
+ firstLog.Body().SetStringVal("first-batch-first-log")
firstLog.SetTraceID(pcommon.NewTraceID([16]byte{1, 2, 3, 4}))
secondLog := firstILS.LogRecords().AppendEmpty()
- secondLog.SetName("first-batch-second-log")
+ secondLog.Body().SetStringVal("first-batch-second-log")
secondLog.SetTraceID(pcommon.NewTraceID([16]byte{1, 2, 3, 4}))

// the second ILL has one log
secondILS := rl.ScopeLogs().AppendEmpty()
secondLibrary := secondILS.Scope()
secondLibrary.SetName("second-library")
thirdLog := secondILS.LogRecords().AppendEmpty()
- thirdLog.SetName("second-batch-first-log")
+ thirdLog.Body().SetStringVal("second-batch-first-log")
thirdLog.SetTraceID(pcommon.NewTraceID([16]byte{1, 2, 3, 4}))

// test
@@ -213,11 +213,11 @@ func TestSplitLogsSameTraceIntoDifferentBatches(t *testing.T) {
// first batch
assert.Equal(t, pcommon.NewTraceID([16]byte{1, 2, 3, 4}), batches[0].ResourceLogs().At(0).ScopeLogs().At(0).LogRecords().At(0).TraceID())
assert.Equal(t, firstLibrary.Name(), batches[0].ResourceLogs().At(0).ScopeLogs().At(0).Scope().Name())
- assert.Equal(t, firstLog.Name(), batches[0].ResourceLogs().At(0).ScopeLogs().At(0).LogRecords().At(0).Name())
- assert.Equal(t, secondLog.Name(), batches[0].ResourceLogs().At(0).ScopeLogs().At(0).LogRecords().At(1).Name())
+ assert.Equal(t, firstLog.Body().StringVal(), batches[0].ResourceLogs().At(0).ScopeLogs().At(0).LogRecords().At(0).Body().StringVal())
+ assert.Equal(t, secondLog.Body().StringVal(), batches[0].ResourceLogs().At(0).ScopeLogs().At(0).LogRecords().At(1).Body().StringVal())

// second batch
assert.Equal(t, pcommon.NewTraceID([16]byte{1, 2, 3, 4}), batches[1].ResourceLogs().At(0).ScopeLogs().At(0).LogRecords().At(0).TraceID())
assert.Equal(t, secondLibrary.Name(), batches[1].ResourceLogs().At(0).ScopeLogs().At(0).Scope().Name())
- assert.Equal(t, thirdLog.Name(), batches[1].ResourceLogs().At(0).ScopeLogs().At(0).LogRecords().At(0).Name())
+ assert.Equal(t, thirdLog.Body().StringVal(), batches[1].ResourceLogs().At(0).ScopeLogs().At(0).LogRecords().At(0).Body().StringVal())
}
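The assertions above change because the deprecated LogRecord name accessors (SetName/Name) were removed from pdata; the tests now distinguish records by their Body string instead. A small hedged sketch of building such a record with the plog/pcommon packages of the same era (method names such as SetStringVal and NewTraceID are tied to that pdata version):

package main

import (
	"fmt"

	"go.opentelemetry.io/collector/pdata/pcommon"
	"go.opentelemetry.io/collector/pdata/plog"
)

func main() {
	logs := plog.NewLogs()
	sl := logs.ResourceLogs().AppendEmpty().ScopeLogs().AppendEmpty()
	sl.Scope().SetName("first-library")

	// LogRecord no longer carries its own name; the identifying string goes in the Body.
	lr := sl.LogRecords().AppendEmpty()
	lr.Body().SetStringVal("first-batch-first-log")
	lr.SetTraceID(pcommon.NewTraceID([16]byte{1, 2, 3, 4}))

	fmt.Println(lr.Body().StringVal(), lr.TraceID().HexString())
}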
2 changes: 1 addition & 1 deletion pkg/translator/prometheusremotewrite/helper_test.go
@@ -657,7 +657,7 @@ func TestAddResourceTargetInfo(t *testing.T) {

func TestMostRecentTimestampInMetric(t *testing.T) {
laterTimestamp := pcommon.NewTimestampFromTime(testdata.TestMetricTime.Add(1 * time.Minute))
- metricMultipleTimestamps := testdata.GenerateMetricsOneMetric().ResourceMetrics().At(0).InstrumentationLibraryMetrics().At(0).Metrics().At(0)
+ metricMultipleTimestamps := testdata.GenerateMetricsOneMetric().ResourceMetrics().At(0).ScopeMetrics().At(0).Metrics().At(0)
// the first datapoint timestamp is at testdata.TestMetricTime
metricMultipleTimestamps.Sum().DataPoints().At(1).SetTimestamp(laterTimestamp)
for _, tc := range []struct {
8 changes: 4 additions & 4 deletions receiver/activedirectorydsreceiver/scraper.go
@@ -23,8 +23,8 @@ import (
"time"

"go.opentelemetry.io/collector/component"
- "go.opentelemetry.io/collector/model/pdata"
"go.opentelemetry.io/collector/pdata/pcommon"
+ "go.opentelemetry.io/collector/pdata/pmetric"
"go.opentelemetry.io/collector/receiver/scrapererror"
"go.uber.org/multierr"

@@ -55,7 +55,7 @@ func (a *activeDirectoryDSScraper) start(ctx context.Context, host component.Hos
return nil
}

- func (a *activeDirectoryDSScraper) scrape(ctx context.Context) (pdata.Metrics, error) {
+ func (a *activeDirectoryDSScraper) scrape(ctx context.Context) (pmetric.Metrics, error) {
var multiErr error
now := pcommon.NewTimestampFromTime(time.Now())

@@ -255,10 +255,10 @@ func (a *activeDirectoryDSScraper) scrape(ctx context.Context) (pdata.Metrics, e
}

if multiErr != nil {
- return pdata.Metrics(a.mb.Emit()), scrapererror.NewPartialScrapeError(multiErr, len(multierr.Errors(multiErr)))
+ return pmetric.Metrics(a.mb.Emit()), scrapererror.NewPartialScrapeError(multiErr, len(multierr.Errors(multiErr)))
}

- return pdata.Metrics(a.mb.Emit()), nil
+ return pmetric.Metrics(a.mb.Emit()), nil
}

func (a *activeDirectoryDSScraper) shutdown(ctx context.Context) error {
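With model/pdata gone, scrape functions return pmetric.Metrics and timestamps become pcommon.Timestamp, as in the hunks above. A minimal sketch of that shape under those assumptions; exampleScraper is a hypothetical stand-in, not this receiver's actual type or metrics builder, and the gauge data-point calls (SetDataType, SetIntVal) follow the pdata API of this period:

package scraperexample // hypothetical package, for illustration only

import (
	"context"
	"time"

	"go.opentelemetry.io/collector/pdata/pcommon"
	"go.opentelemetry.io/collector/pdata/pmetric"
)

// exampleScraper stands in for a receiver's scraper struct.
type exampleScraper struct{}

// scrape returns pmetric.Metrics instead of the removed pdata.Metrics.
func (s *exampleScraper) scrape(_ context.Context) (pmetric.Metrics, error) {
	// pcommon.NewTimestampFromTime replaces pdata.NewTimestampFromTime.
	now := pcommon.NewTimestampFromTime(time.Now())

	md := pmetric.NewMetrics()
	m := md.ResourceMetrics().AppendEmpty().ScopeMetrics().AppendEmpty().Metrics().AppendEmpty()
	m.SetName("example.scraped.value")
	m.SetDataType(pmetric.MetricDataTypeGauge)
	dp := m.Gauge().DataPoints().AppendEmpty()
	dp.SetTimestamp(now)
	dp.SetIntVal(1)

	return md, nil
}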
4 changes: 2 additions & 2 deletions receiver/iisreceiver/internal/metadata/recorder.go
@@ -15,10 +15,10 @@
package metadata // import "github.com/open-telemetry/opentelemetry-collector-contrib/receiver/iisreceiver/internal/metadata"

import (
- "go.opentelemetry.io/collector/model/pdata"
+ "go.opentelemetry.io/collector/pdata/pcommon"
)

- func (mb *MetricsBuilder) RecordAny(ts pdata.Timestamp, val float64, name string, attributes map[string]string) {
+ func (mb *MetricsBuilder) RecordAny(ts pcommon.Timestamp, val float64, name string, attributes map[string]string) {
switch name {
case "iis.connection.active":
mb.RecordIisConnectionActiveDataPoint(ts, int64(val))
7 changes: 4 additions & 3 deletions receiver/iisreceiver/scraper.go
@@ -24,7 +24,7 @@ import (

"go.opentelemetry.io/collector/component"
"go.opentelemetry.io/collector/consumer"
- "go.opentelemetry.io/collector/model/pdata"
+ "go.opentelemetry.io/collector/pdata/pcommon"
+ "go.opentelemetry.io/collector/pdata/pmetric"
"go.opentelemetry.io/collector/receiver/scrapererror"
"go.uber.org/multierr"
"go.uber.org/zap"
@@ -66,9 +67,9 @@ func (rcvr *iisReceiver) start(ctx context.Context, host component.Host) error {
}

// scrape pulls counter values from the watchers
- func (rcvr *iisReceiver) scrape(ctx context.Context) (pdata.Metrics, error) {
+ func (rcvr *iisReceiver) scrape(ctx context.Context) (pmetric.Metrics, error) {
var errs error
- now := pdata.NewTimestampFromTime(time.Now())
+ now := pcommon.NewTimestampFromTime(time.Now())

for _, watcher := range rcvr.watchers {
counterValues, err := watcher.ScrapeData()
@@ -667,7 +667,7 @@ func verifyRelabelJobInstance(t *testing.T, td *testData, rms []*pmetric.Resourc
wantAttributes.Update("net.host.port", pcommon.NewValueString(""))
wantAttributes.Insert("net.host.name", pcommon.NewValueString("relabeled-instance"))

- metrics1 := rms[0].InstrumentationLibraryMetrics().At(0).Metrics()
+ metrics1 := rms[0].ScopeMetrics().At(0).Metrics()
ts1 := metrics1.At(0).Gauge().DataPoints().At(0).Timestamp()
doCompare(t, "relabel-job-instance", wantAttributes, rms[0], []testExpectation{
assertMetricPresent("jvm_memory_bytes_used",
@@ -15,10 +15,10 @@
package metadata // import "github.com/open-telemetry/opentelemetry-collector-contrib/receiver/sqlserverreceiver/internal/metadata"

import (
- "go.opentelemetry.io/collector/model/pdata"
+ "go.opentelemetry.io/collector/pdata/pcommon"
)

- func (mb *MetricsBuilder) RecordAnyDataPoint(ts pdata.Timestamp, val float64, name string, attributes map[string]string) {
+ func (mb *MetricsBuilder) RecordAnyDataPoint(ts pcommon.Timestamp, val float64, name string, attributes map[string]string) {
switch name {
case "sqlserver.user.connection.count":
mb.RecordSqlserverUserConnectionCountDataPoint(ts, val)
7 changes: 4 additions & 3 deletions receiver/sqlserverreceiver/scraper.go
@@ -22,7 +22,8 @@ import (
"time"

"go.opentelemetry.io/collector/component"
- "go.opentelemetry.io/collector/model/pdata"
+ "go.opentelemetry.io/collector/pdata/pcommon"
+ "go.opentelemetry.io/collector/pdata/pmetric"
"go.uber.org/multierr"
"go.uber.org/zap"

@@ -62,7 +63,7 @@ func (s *sqlServerScraper) start(ctx context.Context, host component.Host) error
}

// scrape collects windows performance counter data from all watchers and then records/emits it using the metricBuilder
- func (s *sqlServerScraper) scrape(ctx context.Context) (pdata.Metrics, error) {
+ func (s *sqlServerScraper) scrape(ctx context.Context) (pmetric.Metrics, error) {
metricsByDatabase, errs := createMetricGroupPerDatabase(s.watchers)

for key, metricGroup := range metricsByDatabase {
@@ -97,7 +98,7 @@ func createMetricGroupPerDatabase(watchers []windowsapi.PerfCounterWatcher) (map
}

func (s *sqlServerScraper) emitMetricGroup(metricGroup []winperfcounters.CounterValue, databaseName string) {
- now := pdata.NewTimestampFromTime(time.Now())
+ now := pcommon.NewTimestampFromTime(time.Now())

for _, metric := range metricGroup {
s.metricsBuilder.RecordAnyDataPoint(now, metric.Value, metric.MetricRep.Name, metric.MetricRep.Attributes)
