fgerlits commented on a change in pull request #1231:
URL: https://github.com/apache/nifi-minifi-cpp/pull/1231#discussion_r778933470
##########
File path: thirdparty/google-styleguide/run_linter.py
##########
@@ -9,18 +12,48 @@
list_of_files = list()
for include_path in args.includePaths:
- for (dir_path, dir_names, file_names) in os.walk(include_path):
- for file_name in file_names:
- if (".h" in file_name) or (".cpp" in file_name):
- list_of_files += [os.path.join(dir_path, file_name)]
+ for (dir_path, dir_names, file_names) in os.walk(include_path):
+ for file_name in file_names:
+ if (".h" in file_name) or (".cpp" in file_name):
+ list_of_files += [os.path.join(dir_path, file_name)]
script_dir = os.path.dirname(os.path.realpath(__file__))
repository_path = os.path.abspath(os.path.join(script_dir, os.pardir,
os.pardir))
arg_list = list()
arg_list.append("--linelength=200")
arg_list.append("--repository=" + repository_path)
-if (args.quiet):
- arg_list.append("--quiet")
+if args.quiet:
+ arg_list.append("--quiet")
-cpplint.main(arg_list + list_of_files)
+
+def cpplint_main_wrapper(file_list):
+ try:
+ cpplint.main(arg_list + file_list)
+ return 0
+ except SystemExit as err:
+ return err.code
+
+
+if __name__ == '__main__':
+ # break up list_of_files to ~equal chunks
+ chunk_num = multiprocessing.cpu_count()
+ chunk_size = math.ceil(len(list_of_files) / chunk_num)
+ chunks = []
+ chunk_begin = 0
+ chunk_end = chunk_size
+ for chunk_cnt in range(chunk_num):
+ chunks.append(list_of_files[chunk_begin:chunk_end])
+ chunk_begin += chunk_size
+ if chunk_begin >= len(list_of_files):
+ break
+ chunk_end += chunk_size
+ if chunk_end > len(list_of_files):
+ chunk_end = len(list_of_files)
Review comment:
`pip/pip3` installs the packages into the current user's home directory,
so it is not quite system-wide — but yeah, maybe it is not worth it.
Yes,
https://toolz.readthedocs.io/en/latest/api.html#toolz.itertoolz.partition is
not what we need; I suggested
https://toolz.readthedocs.io/en/latest/api.html#toolz.itertoolz.partition_all,
which allows the last chunk to be smaller.
How about factoring out the chunking code? I think that would improve
readability without introducing a new dependency:
```suggestion
def create_chunks(chunk_size, items):  # move to before `if __name__ == '__main__':`
    chunks = []
    chunk_begin = 0
    chunk_end = chunk_size
    while chunk_begin < len(items):
        chunks.append(items[chunk_begin:chunk_end])
        chunk_begin += chunk_size
        chunk_end = min(chunk_end + chunk_size, len(items))
    return chunks


chunks = create_chunks(chunk_size, list_of_files)
```
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]