GitHub user myui commented on a diff in the pull request:

    https://github.com/apache/incubator-hivemall/pull/116#discussion_r141543643
  
    --- Diff: core/src/main/java/hivemall/embedding/CBoWModel.java ---
    @@ -0,0 +1,131 @@
    +/*
    + * Licensed to the Apache Software Foundation (ASF) under one
    + * or more contributor license agreements.  See the NOTICE file
    + * distributed with this work for additional information
    + * regarding copyright ownership.  The ASF licenses this file
    + * to you under the Apache License, Version 2.0 (the
    + * "License"); you may not use this file except in compliance
    + * with the License.  You may obtain a copy of the License at
    + *
    + *   http://www.apache.org/licenses/LICENSE-2.0
    + *
    + * Unless required by applicable law or agreed to in writing,
    + * software distributed under the License is distributed on an
    + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
    + * KIND, either express or implied.  See the License for the
    + * specific language governing permissions and limitations
    + * under the License.
    + */
    +package hivemall.embedding;
    +
    +import hivemall.math.random.PRNG;
    +import hivemall.utils.collections.maps.Int2FloatOpenHashTable;
    +
    +import javax.annotation.Nonnull;
    +import java.util.List;
    +
    +public final class CBoWModel extends AbstractWord2VecModel {
    +    protected CBoWModel(final int dim, final int win, final int neg, final int iter,
    +            final float startingLR, final long numTrainWords, final Int2FloatOpenHashTable S,
    +            final int[] aliasWordId) {
    +        super(dim, win, neg, iter, startingLR, numTrainWords, S, aliasWordId);
    +    }
    +
    +    protected void trainOnDoc(@Nonnull final int[] doc) {
    +        final int vecDim = dim;
    +        final int numNegative = neg;
    +        final PRNG _rnd = rnd;
    +        final Int2FloatOpenHashTable _S = S;
    +        final int[] _aliasWordId = aliasWordId;
    +        float label, gradient;
    +
    +        // reuse instance
    +        int windowSize, k, numContext, targetWord, inWord, positiveWord;
    +
    +        updateLearningRate();
    +
    +        int docLength = doc.length;
    +        for (int t = 0; t < iter; t++) {
    +            for (int positiveWordPosition = 0; positiveWordPosition < docLength; positiveWordPosition++) {
    +                windowSize = _rnd.nextInt(win) + 1;
    +
    +                numContext = windowSize * 2 + Math.min(0, positiveWordPosition - windowSize)
    +                        + Math.min(0, docLength - positiveWordPosition - windowSize - 1);
    +
    +                float[] gradVec = new float[vecDim];
    --- End diff --
    
    add `final` for `gradVec` and `averageVec`.
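    
    For reference, a minimal sketch of the two declarations with the suggested change applied (the `averageVec` allocation is assumed to mirror `gradVec`, since it falls outside the quoted hunk):
    
        // suggested change: mark the per-position scratch buffers as final
        final float[] gradVec = new float[vecDim];
        // assumed: averageVec is allocated the same way later in this loop body
        final float[] averageVec = new float[vecDim];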

