lostluck commented on code in PR #29590:
URL: https://github.com/apache/beam/pull/29590#discussion_r1416404011


##########
sdks/go/pkg/beam/core/runtime/exec/pcollection.go:
##########
@@ -96,6 +102,13 @@ func (p *PCollection) ProcessElement(ctx context.Context, 
elm *FullValue, values
                var w byteCounter
                p.elementCoder.Encode(elm, &w)
                p.addSize(int64(w.count))
+
+               if p.dataSampler != nil {
+                       var buf bytes.Buffer
+                       EncodeWindowedValueHeader(p.windowCoder, elm.Windows, 
elm.Timestamp, elm.Pane, &buf)
+                       p.elementCoder.Encode(elm, &buf)

Review Comment:
   With this approach, we're encoding the element twice; once to get a size 
estimate in line 103, and another to actually get the encoded value. I'd 
recommend we have a branch instead, and do a bit of arithmetic to avoid 
doubling the encoding cost on sampling.
   
   ```
                if p.dataSampler == nil {
                        var w byteCounter
                        p.elementCoder.Encode(elm, &w)
                        p.addSize(int64(w.count))
                } else {
                        var buf bytes.Buffer
                        EncodeWindowedValueHeader(p.windowCoder, elm.Windows, 
elm.Timestamp, elm.Pane, &buf)
                        size := buf.Len()
                        p.elementCoder.Encode(elm, &buf)
                        p.addSize(int64(buf.Len() - size))
                        p.dataSampler.SendSample(p.PColID, buf.Bytes(), 
time.Now())
                }
   ```



##########
sdks/go/pkg/beam/core/runtime/exec/datasampler.go:
##########
@@ -0,0 +1,156 @@
+// Licensed to the Apache Software Foundation (ASF) under one or more
+// contributor license agreements.  See the NOTICE file distributed with
+// this work for additional information regarding copyright ownership.
+// The ASF licenses this file to You under the Apache License, Version 2.0
+// (the "License"); you may not use this file except in compliance with
+// the License.  You may obtain a copy of the License at
+//
+//    http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package exec
+
+import (
+       "context"
+       "sync"
+       "time"
+)
+
+// DataSample contains property for sampled element
+type DataSample struct {

Review Comment:
   This type isn't used outside of the exec package, prefer making this type 
unexported (rename it `dataSample`).



##########
sdks/go/pkg/beam/core/runtime/exec/datasampler.go:
##########
@@ -0,0 +1,156 @@
+// Licensed to the Apache Software Foundation (ASF) under one or more
+// contributor license agreements.  See the NOTICE file distributed with
+// this work for additional information regarding copyright ownership.
+// The ASF licenses this file to You under the Apache License, Version 2.0
+// (the "License"); you may not use this file except in compliance with
+// the License.  You may obtain a copy of the License at
+//
+//    http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package exec
+
+import (
+       "context"
+       "sync"
+       "time"
+)
+
+// DataSample contains property for sampled element
+type DataSample struct {
+       PCollectionID string
+       Timestamp     time.Time
+       Element       []byte
+}
+
+// DataSampler manages sampled elements based on PCollectionID
+type DataSampler struct {
+       sampleChannel chan *DataSample
+       samplesMap    sync.Map // Key: PCollectionID string, Value: 
*OutputSamples pointer
+       ctx           context.Context
+}
+
+// NewDataSampler inits a new Data Sampler object and returns pointer to it.
+func NewDataSampler(ctx context.Context) *DataSampler {
+       return &DataSampler{
+               sampleChannel: make(chan *DataSample, 1000),
+               ctx:           ctx,
+       }
+}
+
+// Process processes sampled element.
+func (d *DataSampler) Process() {
+       for {
+               select {
+               case <-d.ctx.Done():
+                       return
+               case sample := <-d.sampleChannel:
+                       d.addSample(sample)
+               }
+       }
+}
+
+// GetSamples returns samples for given pCollectionID.
+// If no pCollectionID is provided, return all available samples
+func (d *DataSampler) GetSamples(pids []string) map[string][]*DataSample {
+       if len(pids) == 0 {
+               return d.getAllSamples()
+       }
+       return d.getSamplesForPCollections(pids)
+}
+
+// SendSample is called by PCollection Node to send sampled element to Data 
Sampler async
+func (d *DataSampler) SendSample(pCollectionID string, element []byte, 
timestamp time.Time) {
+       sample := DataSample{
+               PCollectionID: pCollectionID,
+               Element:       element,
+               Timestamp:     timestamp,
+       }
+       d.sampleChannel <- &sample
+}
+
+func (d *DataSampler) getAllSamples() map[string][]*DataSample {
+       var res = make(map[string][]*DataSample)
+       d.samplesMap.Range(func(key any, value any) bool {
+               pid := key.(string)
+               samples := d.getSamples(pid)
+               if len(samples) > 0 {
+                       res[pid] = samples
+               }
+               return true
+       })
+       return res
+}
+
+func (d *DataSampler) getSamplesForPCollections(pids []string) 
map[string][]*DataSample {
+       var res = make(map[string][]*DataSample)
+       for _, pid := range pids {
+               samples := d.getSamples(pid)
+               if len(samples) > 0 {
+                       res[pid] = samples
+               }
+       }
+       return res
+}
+
+func (d *DataSampler) addSample(sample *DataSample) {
+       p, ok := d.samplesMap.Load(sample.PCollectionID)
+       if !ok {
+               p = &outputSamples{maxElements: 10, numSamples: 0, sampleIndex: 
0}
+               d.samplesMap.Store(sample.PCollectionID, p)
+       }
+       outputSamples := p.(*outputSamples)
+       outputSamples.addSample(sample)
+}
+
+func (d *DataSampler) getSamples(pCollectionID string) []*DataSample {
+       p, ok := d.samplesMap.Load(pCollectionID)
+       if !ok {
+               return nil
+       }
+       outputSamples := p.(*outputSamples)
+       return outputSamples.getSamples()
+}
+
+type outputSamples struct {
+       elements    []*DataSample
+       mu          sync.Mutex
+       maxElements int
+       numSamples  int

Review Comment:
   I think you could remove the `numSamples` field, since you could do 
`len(o.elements)` instead to check the current number of samples.
   
   A very tricky implementation could avoid `maxElements`, by allocating a 
slice `make([]*DataSample,0, MAX)` when the elements slice is nil, and then 
check against the `cap(o.elements)` but I think that code would be much harder 
to understand, and not worth the savings of an int per PCollection.



##########
sdks/go/pkg/beam/core/runtime/harness/harness.go:
##########
@@ -49,6 +50,7 @@ const URNMonitoringInfoShortID = 
"beam:protocol:monitoring_info_short_ids:v1"
 type Options struct {
        RunnerCapabilities []string // URNs for what runners are able to 
understand over the FnAPI.
        StatusEndpoint     string   // Endpoint for worker status reporting.
+       EnableDataSampling bool     // Enable data sampling feature

Review Comment:
   I've spoken with Sam and noted that the current design is because Python 
doesn't use Runner capabilities properly, even though they're set in the 
container image. That's fine.
   
   But effectively, what I'd like then is to append the data sampling URN to 
the received RunnerCapabilities slice, and then use the existing map to 
activate the feature in harness/harness.go instead of adding a boolean field.
   
   That way the downstream code is already keying off the correct spot long 
term (where the feature is enabled via the capability being present) so it can 
eventually be cleaned up once Dataflow is setting it properly. This allows 
other runners (like the local Prism runner) to eventually implement 
datasampling in a measured fashion.



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]

Reply via email to