Remove python 3.7 support #27194

Merged · 8 commits · Jun 22, 2023
Changes from 1 commit
46 changes: 23 additions & 23 deletions .test-infra/metrics/sync/jenkins/syncjenkins.py
@@ -62,7 +62,7 @@ def fetchJobs():
url = ('https://ci-beam.apache.org/api/json'
'?tree=jobs[name,url,lastCompletedBuild[id]]&depth=1')
r = requests.get(url)
- jobs = r.json()[u'jobs']
+ jobs = r.json()['jobs']
result = map(lambda x: (x['name'],
int(x['lastCompletedBuild']['id'])
if x['lastCompletedBuild'] is not None
@@ -122,31 +122,31 @@ def fetchBuildsForJob(jobUrl):
f'estimatedDuration,fullDisplayName,actions[{durFields}]')
url = f'{jobUrl}api/json?depth=1&tree=builds[{fields}]'
r = requests.get(url)
- return r.json()[u'builds']
+ return r.json()['builds']


def buildRowValuesArray(jobName, build):
timings = next((x
- for x in build[u'actions']
- if (u'_class' in x)
- and (x[u'_class'] == u'jenkins.metrics.impl.TimeInQueueAction')),
+ for x in build['actions']
+ if ('_class' in x)
+ and (x['_class'] == 'jenkins.metrics.impl.TimeInQueueAction')),
None)
values = [jobName,
- int(build[u'id']),
- build[u'url'],
- build[u'result'],
- datetime.fromtimestamp(build[u'timestamp'] / 1000),
- build[u'builtOn'],
- build[u'duration'],
- build[u'estimatedDuration'],
- build[u'fullDisplayName'],
- timings[u'blockedDurationMillis'] if timings is not None else -1,
- timings[u'buildableDurationMillis'] if timings is not None else -1,
- timings[u'buildingDurationMillis'] if timings is not None else -1,
- timings[u'executingTimeMillis'] if timings is not None else -1,
- timings[u'queuingDurationMillis'] if timings is not None else -1,
- timings[u'totalDurationMillis'] if timings is not None else -1,
- timings[u'waitingDurationMillis'] if timings is not None else -1]
+ int(build['id']),
+ build['url'],
+ build['result'],
+ datetime.fromtimestamp(build['timestamp'] / 1000),
+ build['builtOn'],
+ build['duration'],
+ build['estimatedDuration'],
+ build['fullDisplayName'],
+ timings['blockedDurationMillis'] if timings is not None else -1,
+ timings['buildableDurationMillis'] if timings is not None else -1,
+ timings['buildingDurationMillis'] if timings is not None else -1,
+ timings['executingTimeMillis'] if timings is not None else -1,
+ timings['queuingDurationMillis'] if timings is not None else -1,
+ timings['totalDurationMillis'] if timings is not None else -1,
+ timings['waitingDurationMillis'] if timings is not None else -1]
return values


@@ -168,16 +168,16 @@ def fetchNewData():
syncedJobId = syncedJobs[newJobName] if newJobName in syncedJobs else -1
if newJobLastBuildId > syncedJobId:
builds = fetchBuildsForJob(newJobUrl)
- builds = [x for x in builds if int(x[u'id']) > syncedJobId]
+ builds = [x for x in builds if int(x['id']) > syncedJobId]

connection = initConnection()
cursor = connection.cursor()

for build in builds:
- if build[u'building']:
+ if build['building']:
continue;
rowValues = buildRowValuesArray(newJobName, build)
print("inserting", newJobName, build[u'id'])
print("inserting", newJobName, build['id'])
insertRow(cursor, rowValues)

cursor.close()
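Every hunk in this commit is the same mechanical cleanup: the `u''` prefix that Python 2 needed to mark unicode literals is accepted but meaningless in Python 3, so dropping it cannot change behavior. A minimal sketch of the equivalence (string values here are illustrative only):

```python
# In Python 3 both spellings denote the same str literal.
assert u'jobs' == 'jobs'
assert type(u'jobs') is str and type('jobs') is str

# The prefix only mattered in Python 2, where bare literals were bytes;
# PEP 414 re-allowed u'' in Python 3.3 solely to ease 2/3-compatible code.
```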
6 changes: 3 additions & 3 deletions sdks/python/apache_beam/coders/coders_test.py
@@ -76,7 +76,7 @@ def test_proto_coder(self):
ma = test_message.MessageA()
mb = ma.field2.add()
mb.field1 = True
- ma.field1 = u'hello world'
+ ma.field1 = 'hello world'
expected_coder = coders.ProtoCoder(ma.__class__)
real_coder = coders_registry.get_coder(ma.__class__)
self.assertEqual(expected_coder, real_coder)
@@ -90,7 +90,7 @@ def test_deterministic_proto_coder(self):
ma = test_message.MessageA()
mb = ma.field2.add()
mb.field1 = True
- ma.field1 = u'hello world'
+ ma.field1 = 'hello world'
expected_coder = coders.DeterministicProtoCoder(ma.__class__)
real_coder = (
coders_registry.get_coder(
@@ -130,7 +130,7 @@ class ProtoPlusCoderTest(unittest.TestCase):
def test_proto_plus_coder(self):
ma = ProtoPlusMessageA()
ma.field2 = [ProtoPlusMessageB(field1=True)]
- ma.field1 = u'hello world'
+ ma.field1 = 'hello world'
expected_coder = coders.ProtoPlusCoder(ma.__class__)
real_coder = coders_registry.get_coder(ma.__class__)
self.assertTrue(issubclass(ma.__class__, proto.Message))
12 changes: 6 additions & 6 deletions sdks/python/apache_beam/coders/coders_test_common.py
@@ -121,7 +121,7 @@ class CodersTest(unittest.TestCase):
-1,
1.5,
b'str\0str',
- u'unicode\0\u0101',
+ 'unicode\0\u0101',
(),
(1, 2, 3),
[],
@@ -407,7 +407,7 @@ def test_tuple_coder(self):
coders.TupleCoder((coders.PickleCoder(), coders.VarIntCoder())),
coders.StrUtf8Coder(),
coders.BooleanCoder())), ((1, 2), 'a', True),
- ((-2, 5), u'a\u0101' * 100, False), ((300, 1), 'abc\0' * 5, True))
+ ((-2, 5), 'a\u0101' * 100, False), ((300, 1), 'abc\0' * 5, True))

def test_tuple_sequence_coder(self):
int_tuple_coder = coders.TupleSequenceCoder(coders.VarIntCoder())
@@ -420,7 +420,7 @@ def test_base64_pickle_coder(self):
self.check_coder(coders.Base64PickleCoder(), 'a', 1, 1.5, (1, 2, 3))

def test_utf8_coder(self):
- self.check_coder(coders.StrUtf8Coder(), 'a', u'ab\u00FF', u'\u0101\0')
+ self.check_coder(coders.StrUtf8Coder(), 'a', 'ab\u00FF', '\u0101\0')

def test_iterable_coder(self):
iterable_coder = coders.IterableCoder(coders.VarIntCoder())
@@ -604,10 +604,10 @@ def test_proto_coder(self):
ma = test_message.MessageA()
mab = ma.field2.add()
mab.field1 = True
- ma.field1 = u'hello world'
+ ma.field1 = 'hello world'

mb = test_message.MessageA()
- mb.field1 = u'beam'
+ mb.field1 = 'beam'

proto_coder = coders.ProtoCoder(ma.__class__)
self.check_coder(proto_coder, ma)
@@ -666,7 +666,7 @@ def __iter__(self):

# Test nested tuple observable.
coder = coders.TupleCoder((coders.StrUtf8Coder(), iter_coder))
- value = (u'123', observ)
+ value = ('123', observ)
self.assertEqual(
coder.get_impl().get_estimated_size_and_observables(value)[1],
[(observ, elem_coder.get_impl())])
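The `check_coder` assertions above reduce to a byte-level round trip through the coder. A self-contained sketch using the UTF-8 string coder from the public `apache_beam.coders` API (a best-effort illustration, not code from this PR):

```python
from apache_beam import coders

coder = coders.StrUtf8Coder()
encoded = coder.encode('ab\u00ff')  # UTF-8 bytes: b'ab\xc3\xbf'
assert isinstance(encoded, bytes)
assert coder.decode(encoded) == 'ab\u00ff'  # lossless round trip
```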
12 changes: 6 additions & 6 deletions sdks/python/apache_beam/dataframe/transforms_test.py
@@ -213,8 +213,8 @@ def test_batching_beam_row_input(self):
with beam.Pipeline() as p:
result = (
p
- | beam.Create([(u'Falcon', 380.), (u'Falcon', 370.), (u'Parrot', 24.),
-               (u'Parrot', 26.)])
+ | beam.Create([('Falcon', 380.), ('Falcon', 370.), ('Parrot', 24.),
+               ('Parrot', 26.)])
| beam.Map(lambda tpl: beam.Row(Animal=tpl[0], Speed=tpl[1]))
| transforms.DataframeTransform(
lambda df: df.groupby('Animal').mean(), include_indexes=True))
@@ -225,8 +225,8 @@ def test_batching_beam_row_to_dataframe(self):
with beam.Pipeline() as p:
df = convert.to_dataframe(
p
- | beam.Create([(u'Falcon', 380.), (u'Falcon', 370.), (
-     u'Parrot', 24.), (u'Parrot', 26.)])
+ | beam.Create([('Falcon', 380.), ('Falcon', 370.), ('Parrot', 24.), (
+     'Parrot', 26.)])
| beam.Map(lambda tpl: beam.Row(Animal=tpl[0], Speed=tpl[1])))

result = convert.to_pcollection(
@@ -260,8 +260,8 @@ def test_unbatching_series(self):
with beam.Pipeline() as p:
result = (
p
- | beam.Create([(u'Falcon', 380.), (u'Falcon', 370.), (u'Parrot', 24.),
-               (u'Parrot', 26.)])
+ | beam.Create([('Falcon', 380.), ('Falcon', 370.), ('Parrot', 24.),
+               ('Parrot', 26.)])
| beam.Map(lambda tpl: beam.Row(Animal=tpl[0], Speed=tpl[1]))
| transforms.DataframeTransform(lambda df: df.Animal))

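These tests all push the same Falcon/Parrot fixture through `DataframeTransform`. For orientation, a condensed sketch of the equivalent explicit conversion path via `apache_beam.dataframe.convert` (a hedged illustration assuming the public `to_dataframe`/`to_pcollection` API, not code from this PR):

```python
import apache_beam as beam
from apache_beam.dataframe import convert

with beam.Pipeline() as p:
  rows = (
      p
      | beam.Create([('Falcon', 380.), ('Falcon', 370.),
                     ('Parrot', 24.), ('Parrot', 26.)])
      | beam.Map(lambda tpl: beam.Row(Animal=tpl[0], Speed=tpl[1])))
  df = convert.to_dataframe(rows)      # deferred, pandas-like frame
  means = df.groupby('Animal').mean()  # ordinary pandas expression
  result = convert.to_pcollection(means, include_indexes=True)
  result | beam.Map(print)  # e.g. rows like (Animal='Falcon', Speed=375.0)
```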
@@ -54,7 +54,7 @@
import google.cloud.bigtable.instance

EXISTING_INSTANCES = [] # type: List[google.cloud.bigtable.instance.Instance]
- LABEL_KEY = u'python-bigtable-beam'
+ LABEL_KEY = 'python-bigtable-beam'
label_stamp = datetime.datetime.utcnow().replace(tzinfo=UTC)
label_stamp_micros = _microseconds_from_datetime(label_stamp)
LABELS = {LABEL_KEY: str(label_stamp_micros)}
4 changes: 2 additions & 2 deletions sdks/python/apache_beam/examples/snippets/snippets_test.py
@@ -614,7 +614,7 @@ def test_model_pipelines(self):
snippets.model_pipelines()
self.assertEqual(
self.get_output(result_path),
- [str(s) for s in [(u'aa', 1), (u'bb', 2), (u'cc', 3)]])
+ [str(s) for s in [('aa', 1), ('bb', 2), ('cc', 3)]])

def test_model_pcollection(self):
temp_path = self.create_temp_file()
@@ -863,7 +863,7 @@ def _inner(topic=None, subscription=None):
input_topic = 'projects/fake-beam-test-project/topic/intopic'
input_values = [
TimestampedValue(b'a a b', 1),
- TimestampedValue(u'🤷 ¯\\_(ツ)_/¯ b b '.encode('utf-8'), 12),
+ TimestampedValue('🤷 ¯\\_(ツ)_/¯ b b '.encode('utf-8'), 12),
TimestampedValue(b'a b c c c', 20)
]
output_topic = 'projects/fake-beam-test-project/topic/outtopic'
2 changes: 1 addition & 1 deletion sdks/python/apache_beam/examples/wordcount_test.py
@@ -38,7 +38,7 @@
class WordCountTest(unittest.TestCase):

SAMPLE_TEXT = (
- u'a b c a b a\nacento gráfico\nJuly 30, 2018\n\n aa bb cc aa bb aa')
+ 'a b c a b a\nacento gráfico\nJuly 30, 2018\n\n aa bb cc aa bb aa')

def test_basics(self):
test_pipeline = TestPipeline(is_integration_test=True)
@@ -33,7 +33,7 @@ class PicklerTest(unittest.TestCase):
NO_MAPPINGPROXYTYPE = not hasattr(types, "MappingProxyType")

def test_basics(self):
- self.assertEqual([1, 'a', (u'z', )], loads(dumps([1, 'a', (u'z', )])))
+ self.assertEqual([1, 'a', ('z', )], loads(dumps([1, 'a', ('z', )])))
fun = lambda x: 'xyz-%s' % x
self.assertEqual('xyz-abc', loads(dumps(fun))('abc'))

2 changes: 1 addition & 1 deletion sdks/python/apache_beam/internal/pickler_test.py
@@ -34,7 +34,7 @@ class PicklerTest(unittest.TestCase):
NO_MAPPINGPROXYTYPE = not hasattr(types, "MappingProxyType")

def test_basics(self):
- self.assertEqual([1, 'a', (u'z', )], loads(dumps([1, 'a', (u'z', )])))
+ self.assertEqual([1, 'a', ('z', )], loads(dumps([1, 'a', ('z', )])))
fun = lambda x: 'xyz-%s' % x
self.assertEqual('xyz-abc', loads(dumps(fun))('abc'))

@@ -60,7 +60,7 @@ def test_xlang_parquetio_write(self):
AvroRecord({"name": "ghi"})]) \
| beam.ExternalTransform(
PARQUET_WRITE_URN,
- ImplicitSchemaPayloadBuilder({'data': u'/tmp/test.parquet'}),
+ ImplicitSchemaPayloadBuilder({'data': '/tmp/test.parquet'}),
address)
except RuntimeError as e:
if re.search(PARQUET_WRITE_URN, str(e)):
@@ -83,7 +83,7 @@
NEW_TYPES_QUERY = ('SELECT bytes, date, time FROM [%s.%s]')
DIALECT_OUTPUT_SCHEMA = ('{"fields": [{"name": "fruit","type": "STRING"}]}')
DIALECT_OUTPUT_VERIFY_QUERY = ('SELECT fruit from `%s`;')
- DIALECT_OUTPUT_EXPECTED = [(u'apple', ), (u'orange', )]
+ DIALECT_OUTPUT_EXPECTED = [('apple', ), ('orange', )]


class BigQueryQueryToTableIT(unittest.TestCase):
34 changes: 17 additions & 17 deletions sdks/python/apache_beam/io/gcp/bigquery_read_it_test.py
@@ -125,9 +125,9 @@ class ReadTests(BigQueryReadIntegrationTests):
}, {
'number': 2, 'str': 'def'
}, {
- 'number': 3, 'str': u'你好'
+ 'number': 3, 'str': '你好'
}, {
- 'number': 4, 'str': u'привет'
+ 'number': 4, 'str': 'привет'
}]

@classmethod
@@ -309,14 +309,14 @@ def test_table_schema_retrieve_with_direct_read(self):
class ReadUsingStorageApiTests(BigQueryReadIntegrationTests):
TABLE_DATA = [{
'number': 1,
- 'string': u'你好',
+ 'string': '你好',
'time': '12:44:31',
'datetime': '2018-12-31 12:44:31',
'rec': None
},
{
'number': 4,
- 'string': u'привет',
+ 'string': 'привет',
'time': '12:44:31',
'datetime': '2018-12-31 12:44:31',
'rec': {
@@ -425,14 +425,14 @@ def test_iobase_source(self):
EXPECTED_TABLE_DATA = [
{
'number': 1,
- 'string': u'你好',
+ 'string': '你好',
'time': datetime.time(12, 44, 31),
'datetime': '2018-12-31T12:44:31',
'rec': None,
},
{
'number': 4,
- 'string': u'привет',
+ 'string': 'привет',
'time': datetime.time(12, 44, 31),
'datetime': '2018-12-31T12:44:31',
'rec': {
@@ -455,14 +455,14 @@ def test_iobase_source_with_native_datetime(self):
EXPECTED_TABLE_DATA = [
{
'number': 1,
- 'string': u'你好',
+ 'string': '你好',
'time': datetime.time(12, 44, 31),
'datetime': datetime.datetime(2018, 12, 31, 12, 44, 31),
'rec': None,
},
{
'number': 4,
- 'string': u'привет',
+ 'string': 'привет',
'time': datetime.time(12, 44, 31),
'datetime': datetime.datetime(2018, 12, 31, 12, 44, 31),
'rec': {
@@ -497,7 +497,7 @@ def test_iobase_source_with_column_selection(self):
def test_iobase_source_with_row_restriction(self):
EXPECTED_TABLE_DATA = [{
'number': 1,
- 'string': u'你好',
+ 'string': '你好',
'time': datetime.time(12, 44, 31),
'datetime': datetime.datetime(2018, 12, 31, 12, 44, 31),
'rec': None
@@ -513,7 +513,7 @@ def test_iobase_source_with_row_restriction(self):

@pytest.mark.it_postcommit
def test_iobase_source_with_column_selection_and_row_restriction(self):
- EXPECTED_TABLE_DATA = [{'string': u'привет'}]
+ EXPECTED_TABLE_DATA = [{'string': 'привет'}]
with beam.Pipeline(argv=self.args) as p:
result = (
p | 'Read with BigQuery Storage API' >> beam.io.ReadFromBigQuery(
@@ -541,14 +541,14 @@ def test_iobase_source_with_query(self):
EXPECTED_TABLE_DATA = [
{
'number': 1,
- 'string': u'你好',
+ 'string': '你好',
'time': datetime.time(12, 44, 31),
'datetime': datetime.datetime(2018, 12, 31, 12, 44, 31),
'rec': None,
},
{
'number': 4,
- 'string': u'привет',
+ 'string': 'привет',
'time': datetime.time(12, 44, 31),
'datetime': datetime.datetime(2018, 12, 31, 12, 44, 31),
'rec': {
@@ -573,7 +573,7 @@ def test_iobase_source_with_query(self):

@pytest.mark.it_postcommit
def test_iobase_source_with_query_and_filters(self):
- EXPECTED_TABLE_DATA = [{'string': u'привет'}]
+ EXPECTED_TABLE_DATA = [{'string': 'привет'}]
query = StaticValueProvider(str, self.query)
with beam.Pipeline(argv=self.args) as p:
result = (
@@ -713,19 +713,19 @@ class ReadAllBQTests(BigQueryReadIntegrationTests):
}, {
'number': 2, 'str': 'def'
}, {
- 'number': 3, 'str': u'你好'
+ 'number': 3, 'str': '你好'
}, {
- 'number': 4, 'str': u'привет'
+ 'number': 4, 'str': 'привет'
}]

TABLE_DATA_2 = [{
'number': 10, 'str': 'abcd'
}, {
'number': 20, 'str': 'defg'
}, {
- 'number': 30, 'str': u'你好'
+ 'number': 30, 'str': '你好'
}, {
- 'number': 40, 'str': u'привет'
+ 'number': 40, 'str': 'привет'
}]

TABLE_DATA_3 = [{'number': 10, 'str': 'abcde', 'extra': 3}]
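The Storage API read tests above combine column projection with a server-side row filter. A hedged sketch of that read path (assuming `ReadFromBigQuery`'s `selected_fields`/`row_restriction` parameters with the DIRECT_READ method; the table name is hypothetical):

```python
import apache_beam as beam

with beam.Pipeline() as p:
  rows = (
      p | beam.io.ReadFromBigQuery(
          method=beam.io.ReadFromBigQuery.Method.DIRECT_READ,
          table='my-project:my_dataset.my_table',  # hypothetical table
          selected_fields=['string'],              # column projection
          row_restriction='number > 2'))           # pushed-down filter
  rows | beam.Map(print)  # e.g. {'string': 'привет'}
```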