Type change to match Vec_Get_Size. Widen the types of variables which are assigned the return value of Vec_Get_Size (from uint32_t to size_t). The widening has no impact on behavior and requires no casting; none of the affected variables is used in any context that would require an implicit narrowing conversion.
Project: http://git-wip-us.apache.org/repos/asf/lucy/repo Commit: http://git-wip-us.apache.org/repos/asf/lucy/commit/f0884c6a Tree: http://git-wip-us.apache.org/repos/asf/lucy/tree/f0884c6a Diff: http://git-wip-us.apache.org/repos/asf/lucy/diff/f0884c6a Branch: refs/heads/master Commit: f0884c6ac873f65432a1f92e4c475ff488db21cb Parents: e2235fe Author: Marvin Humphrey <[email protected]> Authored: Tue Mar 29 18:59:11 2016 -0700 Committer: Marvin Humphrey <[email protected]> Committed: Tue Mar 29 21:04:54 2016 -0700 ---------------------------------------------------------------------- core/Lucy/Analysis/PolyAnalyzer.c | 4 ++-- core/Lucy/Highlight/HeatMap.c | 28 ++++++++++++++-------------- core/Lucy/Index/DeletionsReader.c | 4 ++-- core/Lucy/Index/DeletionsWriter.c | 4 ++-- core/Lucy/Index/FilePurger.c | 6 +++--- core/Lucy/Index/Indexer.c | 4 ++-- core/Lucy/Index/PolyLexicon.c | 3 +-- core/Lucy/Index/PolyReader.c | 4 ++-- core/Lucy/Index/Segment.c | 4 ++-- core/Lucy/Plan/Schema.c | 10 +++++----- core/Lucy/Search/ANDQuery.c | 8 ++++---- core/Lucy/Search/ORQuery.c | 10 +++++----- core/Lucy/Search/PhraseQuery.c | 8 ++++---- core/Lucy/Search/PolyQuery.c | 8 ++++---- core/Lucy/Search/QueryParser.c | 18 +++++++++--------- core/Lucy/Util/SortExternal.c | 6 +++--- core/LucyX/Search/ProximityQuery.c | 8 ++++---- 17 files changed, 68 insertions(+), 69 deletions(-) ---------------------------------------------------------------------- http://git-wip-us.apache.org/repos/asf/lucy/blob/f0884c6a/core/Lucy/Analysis/PolyAnalyzer.c ---------------------------------------------------------------------- diff --git a/core/Lucy/Analysis/PolyAnalyzer.c b/core/Lucy/Analysis/PolyAnalyzer.c index ac04bad..695c2d7 100644 --- a/core/Lucy/Analysis/PolyAnalyzer.c +++ b/core/Lucy/Analysis/PolyAnalyzer.c @@ -87,7 +87,7 @@ PolyAnalyzer_Transform_IMP(PolyAnalyzer *self, Inversion *inversion) { Inversion* PolyAnalyzer_Transform_Text_IMP(PolyAnalyzer *self, String *text) { Vector *const analyzers = 
PolyAnalyzer_IVARS(self)->analyzers; - const uint32_t num_analyzers = Vec_Get_Size(analyzers); + const size_t num_analyzers = Vec_Get_Size(analyzers); Inversion *retval; if (num_analyzers == 0) { @@ -100,7 +100,7 @@ PolyAnalyzer_Transform_Text_IMP(PolyAnalyzer *self, String *text) { else { Analyzer *first_analyzer = (Analyzer*)Vec_Fetch(analyzers, 0); retval = Analyzer_Transform_Text(first_analyzer, text); - for (uint32_t i = 1; i < num_analyzers; i++) { + for (size_t i = 1; i < num_analyzers; i++) { Analyzer *analyzer = (Analyzer*)Vec_Fetch(analyzers, i); Inversion *new_inversion = Analyzer_Transform(analyzer, retval); DECREF(retval); http://git-wip-us.apache.org/repos/asf/lucy/blob/f0884c6a/core/Lucy/Highlight/HeatMap.c ---------------------------------------------------------------------- diff --git a/core/Lucy/Highlight/HeatMap.c b/core/Lucy/Highlight/HeatMap.c index 621a18d..8700fa3 100644 --- a/core/Lucy/Highlight/HeatMap.c +++ b/core/Lucy/Highlight/HeatMap.c @@ -65,19 +65,19 @@ S_compare_i32(const void *va, const void *vb) { // offsets and lengths... but leave the scores at 0. static Vector* S_flattened_but_empty_spans(Vector *spans) { - const uint32_t num_spans = Vec_Get_Size(spans); + const size_t num_spans = Vec_Get_Size(spans); int32_t *bounds = (int32_t*)MALLOCATE((num_spans * 2) * sizeof(int32_t)); // Assemble a list of all unique start/end boundaries. 
- for (uint32_t i = 0; i < num_spans; i++) { + for (size_t i = 0; i < num_spans; i++) { Span *span = (Span*)Vec_Fetch(spans, i); bounds[i] = Span_Get_Offset(span); bounds[i + num_spans] = Span_Get_Offset(span) + Span_Get_Length(span); } qsort(bounds, num_spans * 2, sizeof(uint32_t), S_compare_i32); - uint32_t num_bounds = 0; + size_t num_bounds = 0; int32_t last = INT32_MAX; - for (uint32_t i = 0; i < num_spans * 2; i++) { + for (size_t i = 0; i < num_spans * 2; i++) { if (bounds[i] != last) { bounds[num_bounds++] = bounds[i]; last = bounds[i]; @@ -86,7 +86,7 @@ S_flattened_but_empty_spans(Vector *spans) { // Create one Span for each zone between two bounds. Vector *flattened = Vec_new(num_bounds - 1); - for (uint32_t i = 0; i < num_bounds - 1; i++) { + for (size_t i = 0; i < num_bounds - 1; i++) { int32_t start = bounds[i]; int32_t length = bounds[i + 1] - start; Vec_Push(flattened, (Obj*)Span_new(start, length, 0.0f)); @@ -98,7 +98,7 @@ S_flattened_but_empty_spans(Vector *spans) { Vector* HeatMap_Flatten_Spans_IMP(HeatMap *self, Vector *spans) { - const uint32_t num_spans = Vec_Get_Size(spans); + const size_t num_spans = Vec_Get_Size(spans); UNUSED_VAR(self); if (!num_spans) { @@ -106,12 +106,12 @@ HeatMap_Flatten_Spans_IMP(HeatMap *self, Vector *spans) { } else { Vector *flattened = S_flattened_but_empty_spans(spans); - const uint32_t num_raw_flattened = Vec_Get_Size(flattened); + const size_t num_raw_flattened = Vec_Get_Size(flattened); // Iterate over each of the source spans, contributing their scores to // any destination span that falls within range. - uint32_t dest_tick = 0; - for (uint32_t i = 0; i < num_spans; i++) { + size_t dest_tick = 0; + for (size_t i = 0; i < num_spans; i++) { Span *source_span = (Span*)Vec_Fetch(spans, i); int32_t source_span_offset = Span_Get_Offset(source_span); int32_t source_span_len = Span_Get_Length(source_span); @@ -127,7 +127,7 @@ HeatMap_Flatten_Spans_IMP(HeatMap *self, Vector *spans) { } // Fill in scores. 
- for (uint32_t j = dest_tick; j < num_raw_flattened; j++) { + for (size_t j = dest_tick; j < num_raw_flattened; j++) { Span *dest_span = (Span*)Vec_Fetch(flattened, j); if (Span_Get_Offset(dest_span) == source_span_end) { break; @@ -142,7 +142,7 @@ HeatMap_Flatten_Spans_IMP(HeatMap *self, Vector *spans) { // Leave holes instead of spans that don't have any score. dest_tick = 0; - for (uint32_t i = 0; i < num_raw_flattened; i++) { + for (size_t i = 0; i < num_raw_flattened; i++) { Span *span = (Span*)Vec_Fetch(flattened, i); if (Span_Get_Weight(span)) { Vec_Store(flattened, dest_tick++, INCREF(span)); @@ -180,13 +180,13 @@ HeatMap_Calc_Proximity_Boost_IMP(HeatMap *self, Span *span1, Span *span2) { Vector* HeatMap_Generate_Proximity_Boosts_IMP(HeatMap *self, Vector *spans) { Vector *boosts = Vec_new(0); - const uint32_t num_spans = Vec_Get_Size(spans); + const size_t num_spans = Vec_Get_Size(spans); if (num_spans > 1) { - for (uint32_t i = 0, max = num_spans - 1; i < max; i++) { + for (size_t i = 0, max = num_spans - 1; i < max; i++) { Span *span1 = (Span*)Vec_Fetch(spans, i); - for (uint32_t j = i + 1; j <= max; j++) { + for (size_t j = i + 1; j <= max; j++) { Span *span2 = (Span*)Vec_Fetch(spans, j); float prox_score = HeatMap_Calc_Proximity_Boost(self, span1, span2); http://git-wip-us.apache.org/repos/asf/lucy/blob/f0884c6a/core/Lucy/Index/DeletionsReader.c ---------------------------------------------------------------------- diff --git a/core/Lucy/Index/DeletionsReader.c b/core/Lucy/Index/DeletionsReader.c index 2231024..52eb96b 100644 --- a/core/Lucy/Index/DeletionsReader.c +++ b/core/Lucy/Index/DeletionsReader.c @@ -102,9 +102,9 @@ PolyDelReader_Iterator_IMP(PolyDeletionsReader *self) { PolyDeletionsReaderIVARS *const ivars = PolyDelReader_IVARS(self); SeriesMatcher *deletions = NULL; if (ivars->del_count) { - uint32_t num_readers = Vec_Get_Size(ivars->readers); + size_t num_readers = Vec_Get_Size(ivars->readers); Vector *matchers = Vec_new(num_readers); - 
for (uint32_t i = 0; i < num_readers; i++) { + for (size_t i = 0; i < num_readers; i++) { DeletionsReader *reader = (DeletionsReader*)Vec_Fetch(ivars->readers, i); Matcher *matcher = DelReader_Iterator(reader); http://git-wip-us.apache.org/repos/asf/lucy/blob/f0884c6a/core/Lucy/Index/DeletionsWriter.c ---------------------------------------------------------------------- diff --git a/core/Lucy/Index/DeletionsWriter.c b/core/Lucy/Index/DeletionsWriter.c index 23de833..1b25462 100644 --- a/core/Lucy/Index/DeletionsWriter.c +++ b/core/Lucy/Index/DeletionsWriter.c @@ -85,7 +85,7 @@ DefDelWriter_init(DefaultDeletionsWriter *self, Schema *schema, DataWriter_init((DataWriter*)self, schema, snapshot, segment, polyreader); DefaultDeletionsWriterIVARS *const ivars = DefDelWriter_IVARS(self); ivars->seg_readers = PolyReader_Seg_Readers(polyreader); - uint32_t num_seg_readers = Vec_Get_Size(ivars->seg_readers); + size_t num_seg_readers = Vec_Get_Size(ivars->seg_readers); ivars->seg_starts = PolyReader_Offsets(polyreader); ivars->bit_vecs = Vec_new(num_seg_readers); ivars->updated = (bool*)CALLOCATE(num_seg_readers, sizeof(bool)); @@ -93,7 +93,7 @@ DefDelWriter_init(DefaultDeletionsWriter *self, Schema *schema, ivars->name_to_tick = Hash_new(num_seg_readers); // Materialize a BitVector of deletions for each segment. 
- for (uint32_t i = 0; i < num_seg_readers; i++) { + for (size_t i = 0; i < num_seg_readers; i++) { SegReader *seg_reader = (SegReader*)Vec_Fetch(ivars->seg_readers, i); BitVector *bit_vec = BitVec_new(SegReader_Doc_Max(seg_reader)); DeletionsReader *del_reader http://git-wip-us.apache.org/repos/asf/lucy/blob/f0884c6a/core/Lucy/Index/FilePurger.c ---------------------------------------------------------------------- diff --git a/core/Lucy/Index/FilePurger.c b/core/Lucy/Index/FilePurger.c index 0278434..8c71678 100644 --- a/core/Lucy/Index/FilePurger.c +++ b/core/Lucy/Index/FilePurger.c @@ -230,9 +230,9 @@ S_discover_unused(FilePurger *self, Vector **purgables_ptr, if (lock && Lock_Is_Locked(lock)) { // The snapshot file is locked, which means someone's using // that version of the index -- protect all of its entries. - uint32_t new_size = Vec_Get_Size(spared) - + Vec_Get_Size(referenced) - + 1; + size_t new_size = Vec_Get_Size(spared) + + Vec_Get_Size(referenced) + + 1; Vec_Grow(spared, new_size); Vec_Push(spared, (Obj*)Str_Clone(entry)); Vec_Push_All(spared, referenced); http://git-wip-us.apache.org/repos/asf/lucy/blob/f0884c6a/core/Lucy/Index/Indexer.c ---------------------------------------------------------------------- diff --git a/core/Lucy/Index/Indexer.c b/core/Lucy/Index/Indexer.c index 1cf4df5..977cf38 100644 --- a/core/Lucy/Index/Indexer.c +++ b/core/Lucy/Index/Indexer.c @@ -400,7 +400,7 @@ static bool S_maybe_merge(Indexer *self, Vector *seg_readers) { IndexerIVARS *const ivars = Indexer_IVARS(self); bool merge_happened = false; - uint32_t num_seg_readers = Vec_Get_Size(seg_readers); + size_t num_seg_readers = Vec_Get_Size(seg_readers); Lock *merge_lock = IxManager_Make_Merge_Lock(ivars->manager); bool got_merge_lock = Lock_Obtain(merge_lock); int64_t cutoff; @@ -484,7 +484,7 @@ void Indexer_Prepare_Commit_IMP(Indexer *self) { IndexerIVARS *const ivars = Indexer_IVARS(self); Vector *seg_readers = PolyReader_Get_Seg_Readers(ivars->polyreader); - uint32_t 
num_seg_readers = Vec_Get_Size(seg_readers); + size_t num_seg_readers = Vec_Get_Size(seg_readers); bool merge_happened = false; if (!ivars->write_lock || ivars->prepared) { http://git-wip-us.apache.org/repos/asf/lucy/blob/f0884c6a/core/Lucy/Index/PolyLexicon.c ---------------------------------------------------------------------- diff --git a/core/Lucy/Index/PolyLexicon.c b/core/Lucy/Index/PolyLexicon.c index 21bd73e..5c414a5 100644 --- a/core/Lucy/Index/PolyLexicon.c +++ b/core/Lucy/Index/PolyLexicon.c @@ -95,7 +95,6 @@ void PolyLex_Reset_IMP(PolyLexicon *self) { PolyLexiconIVARS *const ivars = PolyLex_IVARS(self); Vector *seg_lexicons = ivars->seg_lexicons; - uint32_t num_segs = Vec_Get_Size(seg_lexicons); SegLexQueue *lex_q = ivars->lex_q; // Empty out the queue. @@ -106,7 +105,7 @@ PolyLex_Reset_IMP(PolyLexicon *self) { } // Fill the queue with valid SegLexicons. - for (uint32_t i = 0; i < num_segs; i++) { + for (size_t i = 0, max = Vec_Get_Size(seg_lexicons); i < max; i++) { SegLexicon *const seg_lexicon = (SegLexicon*)Vec_Fetch(seg_lexicons, i); SegLex_Reset(seg_lexicon); http://git-wip-us.apache.org/repos/asf/lucy/blob/f0884c6a/core/Lucy/Index/PolyReader.c ---------------------------------------------------------------------- diff --git a/core/Lucy/Index/PolyReader.c b/core/Lucy/Index/PolyReader.c index 7cd8a22..aa85e7b 100644 --- a/core/Lucy/Index/PolyReader.c +++ b/core/Lucy/Index/PolyReader.c @@ -164,9 +164,9 @@ PolyReader_init(PolyReader *self, Schema *schema, Folder *folder, ivars->del_count = 0; if (sub_readers) { - uint32_t num_segs = Vec_Get_Size(sub_readers); + size_t num_segs = Vec_Get_Size(sub_readers); Vector *segments = Vec_new(num_segs); - for (uint32_t i = 0; i < num_segs; i++) { + for (size_t i = 0; i < num_segs; i++) { SegReader *seg_reader = (SegReader*)CERTIFY(Vec_Fetch(sub_readers, i), SEGREADER); Vec_Push(segments, INCREF(SegReader_Get_Segment(seg_reader))); 
http://git-wip-us.apache.org/repos/asf/lucy/blob/f0884c6a/core/Lucy/Index/Segment.c ---------------------------------------------------------------------- diff --git a/core/Lucy/Index/Segment.c b/core/Lucy/Index/Segment.c index 660c76a..fb8a66f 100644 --- a/core/Lucy/Index/Segment.c +++ b/core/Lucy/Index/Segment.c @@ -119,7 +119,7 @@ Seg_Read_File_IMP(Segment *self, Folder *folder) { // Get list of field nums. Vector *source_by_num = (Vector*)Hash_Fetch_Utf8(my_metadata, "field_names", 11); - uint32_t num_fields = source_by_num ? Vec_Get_Size(source_by_num) : 0; + size_t num_fields = source_by_num ? Vec_Get_Size(source_by_num) : 0; if (source_by_num == NULL) { THROW(ERR, "Failed to extract 'field_names' from metadata"); } @@ -131,7 +131,7 @@ Seg_Read_File_IMP(Segment *self, Folder *folder) { ivars->by_name = Hash_new(num_fields); // Copy the list of fields from the source. - for (uint32_t i = 0; i < num_fields; i++) { + for (size_t i = 0; i < num_fields; i++) { String *name = (String*)Vec_Fetch(source_by_num, i); Seg_Add_Field(self, name); } http://git-wip-us.apache.org/repos/asf/lucy/blob/f0884c6a/core/Lucy/Plan/Schema.c ---------------------------------------------------------------------- diff --git a/core/Lucy/Plan/Schema.c b/core/Lucy/Plan/Schema.c index bfcbebe..2da6716 100644 --- a/core/Lucy/Plan/Schema.c +++ b/core/Lucy/Plan/Schema.c @@ -229,9 +229,9 @@ Schema_All_Fields_IMP(Schema *self) { return Hash_Keys(Schema_IVARS(self)->types); } -uint32_t +size_t S_find_in_array(Vector *array, Obj *obj) { - for (uint32_t i = 0, max = Vec_Get_Size(array); i < max; i++) { + for (size_t i = 0, max = Vec_Get_Size(array); i < max; i++) { Obj *candidate = Vec_Fetch(array, i); if (obj == NULL && candidate == NULL) { return i; @@ -245,7 +245,7 @@ S_find_in_array(Vector *array, Obj *obj) { } } THROW(ERR, "Couldn't find match for %o", obj); - UNREACHABLE_RETURN(uint32_t); + UNREACHABLE_RETURN(size_t); } Hash* @@ -273,12 +273,12 @@ Schema_Dump_IMP(Schema *self) { FullTextType 
*fttype = (FullTextType*)type; Hash *type_dump = FullTextType_Dump_For_Schema(fttype); Analyzer *analyzer = FullTextType_Get_Analyzer(fttype); - uint32_t tick + size_t tick = S_find_in_array(ivars->uniq_analyzers, (Obj*)analyzer); // Store the tick which references a unique analyzer. Hash_Store_Utf8(type_dump, "analyzer", 8, - (Obj*)Str_newf("%u32", tick)); + (Obj*)Str_newf("%u64", (uint64_t)tick)); Hash_Store(type_dumps, field, (Obj*)type_dump); } http://git-wip-us.apache.org/repos/asf/lucy/blob/f0884c6a/core/Lucy/Search/ANDQuery.c ---------------------------------------------------------------------- diff --git a/core/Lucy/Search/ANDQuery.c b/core/Lucy/Search/ANDQuery.c index 3178c42..470cac6 100644 --- a/core/Lucy/Search/ANDQuery.c +++ b/core/Lucy/Search/ANDQuery.c @@ -46,12 +46,12 @@ ANDQuery_init(ANDQuery *self, Vector *children) { String* ANDQuery_To_String_IMP(ANDQuery *self) { ANDQueryIVARS *const ivars = ANDQuery_IVARS(self); - uint32_t num_kids = Vec_Get_Size(ivars->children); + size_t num_kids = Vec_Get_Size(ivars->children); if (!num_kids) { return Str_new_from_trusted_utf8("()", 2); } else { CharBuf *buf = CB_new(0); CB_Cat_Trusted_Utf8(buf, "(", 1); - for (uint32_t i = 0; i < num_kids; i++) { + for (size_t i = 0; i < num_kids; i++) { String *kid_string = Obj_To_String(Vec_Fetch(ivars->children, i)); CB_Cat(buf, kid_string); DECREF(kid_string); @@ -108,7 +108,7 @@ Matcher* ANDCompiler_Make_Matcher_IMP(ANDCompiler *self, SegReader *reader, bool need_score) { ANDCompilerIVARS *const ivars = ANDCompiler_IVARS(self); - uint32_t num_kids = Vec_Get_Size(ivars->children); + size_t num_kids = Vec_Get_Size(ivars->children); if (num_kids == 1) { Compiler *only_child = (Compiler*)Vec_Fetch(ivars->children, 0); @@ -118,7 +118,7 @@ ANDCompiler_Make_Matcher_IMP(ANDCompiler *self, SegReader *reader, Vector *child_matchers = Vec_new(num_kids); // Add child matchers one by one. 
- for (uint32_t i = 0; i < num_kids; i++) { + for (size_t i = 0; i < num_kids; i++) { Compiler *child = (Compiler*)Vec_Fetch(ivars->children, i); Matcher *child_matcher = Compiler_Make_Matcher(child, reader, need_score); http://git-wip-us.apache.org/repos/asf/lucy/blob/f0884c6a/core/Lucy/Search/ORQuery.c ---------------------------------------------------------------------- diff --git a/core/Lucy/Search/ORQuery.c b/core/Lucy/Search/ORQuery.c index 9b34d85..4a6fb32 100644 --- a/core/Lucy/Search/ORQuery.c +++ b/core/Lucy/Search/ORQuery.c @@ -61,13 +61,13 @@ ORQuery_Equals_IMP(ORQuery *self, Obj *other) { String* ORQuery_To_String_IMP(ORQuery *self) { ORQueryIVARS *const ivars = ORQuery_IVARS(self); - uint32_t num_kids = Vec_Get_Size(ivars->children); + size_t num_kids = Vec_Get_Size(ivars->children); if (!num_kids) { return Str_new_from_trusted_utf8("()", 2); } else { CharBuf *buf = CB_new(0); CB_Cat_Trusted_Utf8(buf, "(", 1); - uint32_t last_kid = num_kids - 1; - for (uint32_t i = 0; i < num_kids; i++) { + size_t last_kid = num_kids - 1; + for (size_t i = 0; i < num_kids; i++) { String *kid_string = Obj_To_String(Vec_Fetch(ivars->children, i)); CB_Cat(buf, kid_string); DECREF(kid_string); @@ -104,7 +104,7 @@ Matcher* ORCompiler_Make_Matcher_IMP(ORCompiler *self, SegReader *reader, bool need_score) { ORCompilerIVARS *const ivars = ORCompiler_IVARS(self); - uint32_t num_kids = Vec_Get_Size(ivars->children); + size_t num_kids = Vec_Get_Size(ivars->children); if (num_kids == 1) { // No need for an ORMatcher wrapper. @@ -116,7 +116,7 @@ ORCompiler_Make_Matcher_IMP(ORCompiler *self, SegReader *reader, uint32_t num_submatchers = 0; // Accumulate sub-matchers. 
- for (uint32_t i = 0; i < num_kids; i++) { + for (size_t i = 0; i < num_kids; i++) { Compiler *child = (Compiler*)Vec_Fetch(ivars->children, i); Matcher *submatcher = Compiler_Make_Matcher(child, reader, need_score); http://git-wip-us.apache.org/repos/asf/lucy/blob/f0884c6a/core/Lucy/Search/PhraseQuery.c ---------------------------------------------------------------------- diff --git a/core/Lucy/Search/PhraseQuery.c b/core/Lucy/Search/PhraseQuery.c index e8ddeea..f40cdde 100644 --- a/core/Lucy/Search/PhraseQuery.c +++ b/core/Lucy/Search/PhraseQuery.c @@ -138,11 +138,11 @@ PhraseQuery_Equals_IMP(PhraseQuery *self, Obj *other) { String* PhraseQuery_To_String_IMP(PhraseQuery *self) { PhraseQueryIVARS *const ivars = PhraseQuery_IVARS(self); - uint32_t num_terms = Vec_Get_Size(ivars->terms); + size_t num_terms = Vec_Get_Size(ivars->terms); CharBuf *buf = CB_new(0); CB_Cat(buf, ivars->field); CB_Cat_Trusted_Utf8(buf, ":\"", 2); - for (uint32_t i = 0; i < num_terms; i++) { + for (size_t i = 0; i < num_terms; i++) { Obj *term = Vec_Fetch(ivars->terms, i); String *term_string = Obj_To_String(term); CB_Cat(buf, term_string); @@ -300,7 +300,7 @@ PhraseCompiler_Make_Matcher_IMP(PhraseCompiler *self, SegReader *reader, PhraseQueryIVARS *const parent_ivars = PhraseQuery_IVARS((PhraseQuery*)ivars->parent); Vector *const terms = parent_ivars->terms; - uint32_t num_terms = Vec_Get_Size(terms); + size_t num_terms = Vec_Get_Size(terms); // Bail if there are no terms. if (!num_terms) { return NULL; } @@ -322,7 +322,7 @@ PhraseCompiler_Make_Matcher_IMP(PhraseCompiler *self, SegReader *reader, // Look up each term. 
Vector *plists = Vec_new(num_terms); - for (uint32_t i = 0; i < num_terms; i++) { + for (size_t i = 0; i < num_terms; i++) { Obj *term = Vec_Fetch(terms, i); PostingList *plist = PListReader_Posting_List(plist_reader, parent_ivars->field, term); http://git-wip-us.apache.org/repos/asf/lucy/blob/f0884c6a/core/Lucy/Search/PolyQuery.c ---------------------------------------------------------------------- diff --git a/core/Lucy/Search/PolyQuery.c b/core/Lucy/Search/PolyQuery.c index c6ae993..d3d839c 100644 --- a/core/Lucy/Search/PolyQuery.c +++ b/core/Lucy/Search/PolyQuery.c @@ -30,11 +30,11 @@ PolyQuery* PolyQuery_init(PolyQuery *self, Vector *children) { - const uint32_t num_kids = children ? Vec_Get_Size(children) : 0; + const size_t num_kids = children ? Vec_Get_Size(children) : 0; Query_init((Query*)self, 1.0f); PolyQueryIVARS *const ivars = PolyQuery_IVARS(self); ivars->children = Vec_new(num_kids); - for (uint32_t i = 0; i < num_kids; i++) { + for (size_t i = 0; i < num_kids; i++) { PolyQuery_Add_Child(self, (Query*)Vec_Fetch(children, i)); } return self; @@ -134,13 +134,13 @@ PolyCompiler_init(PolyCompiler *self, PolyQuery *parent, Searcher *searcher, float boost) { PolyCompilerIVARS *const ivars = PolyCompiler_IVARS(self); PolyQueryIVARS *const parent_ivars = PolyQuery_IVARS(parent); - const uint32_t num_kids = Vec_Get_Size(parent_ivars->children); + const size_t num_kids = Vec_Get_Size(parent_ivars->children); Compiler_init((Compiler*)self, (Query*)parent, searcher, NULL, boost); ivars->children = Vec_new(num_kids); // Iterate over the children, creating a Compiler for each one. 
- for (uint32_t i = 0; i < num_kids; i++) { + for (size_t i = 0; i < num_kids; i++) { Query *child_query = (Query*)Vec_Fetch(parent_ivars->children, i); float sub_boost = boost * Query_Get_Boost(child_query); Compiler *child_compiler http://git-wip-us.apache.org/repos/asf/lucy/blob/f0884c6a/core/Lucy/Search/QueryParser.c ---------------------------------------------------------------------- diff --git a/core/Lucy/Search/QueryParser.c b/core/Lucy/Search/QueryParser.c index 3042231..f17e131 100644 --- a/core/Lucy/Search/QueryParser.c +++ b/core/Lucy/Search/QueryParser.c @@ -127,9 +127,9 @@ QParser_init(QueryParser *self, Schema *schema, Analyzer *analyzer, } else { Vector *all_fields = Schema_All_Fields(schema); - uint32_t num_fields = Vec_Get_Size(all_fields); + size_t num_fields = Vec_Get_Size(all_fields); ivars->fields = Vec_new(num_fields); - for (uint32_t i = 0; i < num_fields; i++) { + for (size_t i = 0; i < num_fields; i++) { String *field = (String*)Vec_Fetch(all_fields, i); FieldType *type = Schema_Fetch_Type(schema, field); if (type && FType_Indexed(type)) { @@ -569,7 +569,7 @@ S_compose_subquery(QueryParser *self, Vector *elems, bool enclosed) { retval = (Query*)INCREF(query); } else { - uint32_t num_elems = Vec_Get_Size(elems); + size_t num_elems = Vec_Get_Size(elems); Vector *required = Vec_new(num_elems); Vector *optional = Vec_new(num_elems); Vector *negated = Vec_new(num_elems); @@ -577,7 +577,7 @@ S_compose_subquery(QueryParser *self, Vector *elems, bool enclosed) { Query *opt_query = NULL; // Demux elems into bins. 
- for (uint32_t i = 0; i < num_elems; i++) { + for (size_t i = 0; i < num_elems; i++) { ParserElem *elem = (ParserElem*)Vec_Fetch(elems, i); if (ParserElem_Required(elem)) { Vec_Push(required, INCREF(ParserElem_As(elem, QUERY))); @@ -589,9 +589,9 @@ S_compose_subquery(QueryParser *self, Vector *elems, bool enclosed) { Vec_Push(negated, INCREF(ParserElem_As(elem, QUERY))); } } - uint32_t num_required = Vec_Get_Size(required); - uint32_t num_negated = Vec_Get_Size(negated); - uint32_t num_optional = Vec_Get_Size(optional); + size_t num_required = Vec_Get_Size(required); + size_t num_negated = Vec_Get_Size(negated); + size_t num_optional = Vec_Get_Size(optional); // Bind all mandatory matchers together in one Query. if (num_required || num_negated) { @@ -914,11 +914,11 @@ QParser_Expand_Leaf_IMP(QueryParser *self, Query *query) { // Extract token texts. String *split_source = S_unescape(self, source_text, unescape_buf); Vector *maybe_texts = Analyzer_Split(analyzer, split_source); - uint32_t num_maybe_texts = Vec_Get_Size(maybe_texts); + size_t num_maybe_texts = Vec_Get_Size(maybe_texts); Vector *token_texts = Vec_new(num_maybe_texts); // Filter out zero-length token texts. 
- for (uint32_t j = 0; j < num_maybe_texts; j++) { + for (size_t j = 0; j < num_maybe_texts; j++) { String *token_text = (String*)Vec_Fetch(maybe_texts, j); if (Str_Get_Size(token_text)) { Vec_Push(token_texts, INCREF(token_text)); http://git-wip-us.apache.org/repos/asf/lucy/blob/f0884c6a/core/Lucy/Util/SortExternal.c ---------------------------------------------------------------------- diff --git a/core/Lucy/Util/SortExternal.c b/core/Lucy/Util/SortExternal.c index a9b923c..ee520a6 100644 --- a/core/Lucy/Util/SortExternal.c +++ b/core/Lucy/Util/SortExternal.c @@ -163,7 +163,7 @@ void SortEx_Add_Run_IMP(SortExternal *self, SortExternal *run) { SortExternalIVARS *const ivars = SortEx_IVARS(self); Vec_Push(ivars->runs, (Obj*)run); - uint32_t num_runs = Vec_Get_Size(ivars->runs); + size_t num_runs = Vec_Get_Size(ivars->runs); ivars->slice_sizes = (uint32_t*)REALLOCATE(ivars->slice_sizes, num_runs * sizeof(uint32_t)); @@ -262,7 +262,7 @@ S_find_endpost(SortExternal *self, SortExternalIVARS *ivars) { static void S_absorb_slices(SortExternal *self, SortExternalIVARS *ivars, Obj **endpost) { - uint32_t num_runs = Vec_Get_Size(ivars->runs); + size_t num_runs = Vec_Get_Size(ivars->runs); Obj ***slice_starts = ivars->slice_starts; uint32_t *slice_sizes = ivars->slice_sizes; Class *klass = SortEx_get_class(self); @@ -273,7 +273,7 @@ S_absorb_slices(SortExternal *self, SortExternalIVARS *ivars, // Find non-empty slices. 
uint32_t num_slices = 0; uint32_t total_size = 0; - for (uint32_t i = 0; i < num_runs; i++) { + for (size_t i = 0; i < num_runs; i++) { SortExternal *const run = (SortExternal*)Vec_Fetch(ivars->runs, i); SortExternalIVARS *const run_ivars = SortEx_IVARS(run); uint32_t slice_size = S_find_slice_size(run, run_ivars, endpost); http://git-wip-us.apache.org/repos/asf/lucy/blob/f0884c6a/core/LucyX/Search/ProximityQuery.c ---------------------------------------------------------------------- diff --git a/core/LucyX/Search/ProximityQuery.c b/core/LucyX/Search/ProximityQuery.c index e4c4fb5..10e2ec3 100644 --- a/core/LucyX/Search/ProximityQuery.c +++ b/core/LucyX/Search/ProximityQuery.c @@ -150,11 +150,11 @@ ProximityQuery_Equals_IMP(ProximityQuery *self, Obj *other) { String* ProximityQuery_To_String_IMP(ProximityQuery *self) { ProximityQueryIVARS *const ivars = ProximityQuery_IVARS(self); - uint32_t num_terms = Vec_Get_Size(ivars->terms); + size_t num_terms = Vec_Get_Size(ivars->terms); CharBuf *buf = CB_new(0); CB_Cat(buf, ivars->field); CB_Cat_Trusted_Utf8(buf, ":\"", 2); - for (uint32_t i = 0; i < num_terms; i++) { + for (size_t i = 0; i < num_terms; i++) { Obj *term = Vec_Fetch(ivars->terms, i); String *term_string = Obj_To_String(term); CB_Cat(buf, term_string); @@ -328,7 +328,7 @@ ProximityCompiler_Make_Matcher_IMP(ProximityCompiler *self, SegReader *reader, ProximityQueryIVARS *const parent_ivars = ProximityQuery_IVARS((ProximityQuery*)ivars->parent); Vector *const terms = parent_ivars->terms; - uint32_t num_terms = Vec_Get_Size(terms); + size_t num_terms = Vec_Get_Size(terms); // Bail if there are no terms. if (!num_terms) { return NULL; } @@ -350,7 +350,7 @@ ProximityCompiler_Make_Matcher_IMP(ProximityCompiler *self, SegReader *reader, // Look up each term. 
Vector *plists = Vec_new(num_terms); - for (uint32_t i = 0; i < num_terms; i++) { + for (size_t i = 0; i < num_terms; i++) { Obj *term = Vec_Fetch(terms, i); PostingList *plist = PListReader_Posting_List(plist_reader, parent_ivars->field, term);
