wordcount_test.py
# -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Test for the wordcount example."""
# pytype: skip-file
import collections
import logging
import re
import unittest
import uuid

import pytest

from apache_beam.examples.dataframe import wordcount
from apache_beam.testing.test_pipeline import TestPipeline
from apache_beam.testing.test_utils import create_file
from apache_beam.testing.test_utils import read_files_from_pattern


class WordCountTest(unittest.TestCase):
  SAMPLE_TEXT = """
  a
  a b
  a b c
  loooooonger words
  """

  @pytest.mark.examples_postcommit
  def test_basics(self):
    test_pipeline = TestPipeline(is_integration_test=True)

    # Set up the files with expected content.
    temp_location = test_pipeline.get_option('temp_location')
    temp_path = '/'.join([temp_location, str(uuid.uuid4())])
    input = create_file('/'.join([temp_path, 'input.txt']), self.SAMPLE_TEXT)
    expected_words = collections.defaultdict(int)
    for word in re.findall(r'[\w]+', self.SAMPLE_TEXT):
      expected_words[word] += 1
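
    # Run the wordcount pipeline over the generated input file, writing its
    # output next to the input under the temp location.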
    extra_opts = {'input': input, 'output': '%s.result' % temp_path}
    wordcount.run(test_pipeline.get_full_options_as_args(**extra_opts))

    # Parse result file and compare.
    results = []
    lines = read_files_from_pattern(temp_path + '.result*').splitlines()
    for line in lines:
      match = re.search(r'(\S+),([0-9]+)', line)
      if match is not None:
        results.append((match.group(1), int(match.group(2))))
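      # Any other non-empty line must be the 'word,count' header.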
      elif line.strip():
        self.assertEqual(line.strip(), 'word,count')
    self.assertEqual(sorted(results), sorted(expected_words.items()))


if __name__ == '__main__':
  logging.getLogger().setLevel(logging.INFO)
  unittest.main()