mirror of
https://github.com/Helium314/HeliBoard.git
synced 2025-06-21 00:20:52 +00:00
allow loading external gesture typing library
This commit is contained in:
parent
ac7ac4f907
commit
8123c79523
38 changed files with 187 additions and 61 deletions
|
@ -0,0 +1,406 @@
|
|||
/*
|
||||
* Copyright (C) 2011 The Android Open Source Project
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package com.android.inputmethod.keyboard;
|
||||
|
||||
import android.graphics.Rect;
|
||||
import android.util.Log;
|
||||
|
||||
import org.dslul.openboard.inputmethod.keyboard.Key;
|
||||
import org.dslul.openboard.inputmethod.keyboard.internal.TouchPositionCorrection;
|
||||
import org.dslul.openboard.inputmethod.latin.common.Constants;
|
||||
import org.dslul.openboard.inputmethod.latin.utils.JniUtils;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
|
||||
import javax.annotation.Nonnull;
|
||||
|
||||
public class ProximityInfo {
|
||||
private static final String TAG = ProximityInfo.class.getSimpleName();
|
||||
private static final boolean DEBUG = false;
|
||||
|
||||
// Must be equal to MAX_PROXIMITY_CHARS_SIZE in native/jni/src/defines.h
|
||||
public static final int MAX_PROXIMITY_CHARS_SIZE = 16;
|
||||
/** Number of key widths from current touch point to search for nearest keys. */
|
||||
private static final float SEARCH_DISTANCE = 1.2f;
|
||||
@Nonnull
|
||||
private static final List<Key> EMPTY_KEY_LIST = Collections.emptyList();
|
||||
private static final float DEFAULT_TOUCH_POSITION_CORRECTION_RADIUS = 0.15f;
|
||||
|
||||
private final int mGridWidth;
|
||||
private final int mGridHeight;
|
||||
private final int mGridSize;
|
||||
private final int mCellWidth;
|
||||
private final int mCellHeight;
|
||||
// TODO: Find a proper name for mKeyboardMinWidth
|
||||
private final int mKeyboardMinWidth;
|
||||
private final int mKeyboardHeight;
|
||||
private final int mMostCommonKeyWidth;
|
||||
private final int mMostCommonKeyHeight;
|
||||
@Nonnull
|
||||
private final List<Key> mSortedKeys;
|
||||
@Nonnull
|
||||
private final List<Key>[] mGridNeighbors;
|
||||
|
||||
    /**
     * Builds the proximity lookup grid for a keyboard and, via JNI, the native
     * proximity info structure used by the gesture/correction engine.
     *
     * @param gridWidth number of grid cells horizontally.
     * @param gridHeight number of grid cells vertically.
     * @param minWidth keyboard width in pixels (stored as mKeyboardMinWidth).
     * @param height keyboard height in pixels.
     * @param mostCommonKeyWidth most common key width; the proximity search unit.
     * @param mostCommonKeyHeight most common key height.
     * @param sortedKeys the keyboard's keys; retained by reference, not copied.
     * @param touchPositionCorrection sweet-spot correction data forwarded to the native side.
     */
    @SuppressWarnings("unchecked")
    public ProximityInfo(final int gridWidth, final int gridHeight, final int minWidth, final int height,
            final int mostCommonKeyWidth, final int mostCommonKeyHeight,
            @Nonnull final List<Key> sortedKeys,
            @Nonnull final TouchPositionCorrection touchPositionCorrection) {
        mGridWidth = gridWidth;
        mGridHeight = gridHeight;
        mGridSize = mGridWidth * mGridHeight;
        // Ceiling division so the cells cover the whole keyboard area.
        mCellWidth = (minWidth + mGridWidth - 1) / mGridWidth;
        mCellHeight = (height + mGridHeight - 1) / mGridHeight;
        mKeyboardMinWidth = minWidth;
        mKeyboardHeight = height;
        mMostCommonKeyHeight = mostCommonKeyHeight;
        mMostCommonKeyWidth = mostCommonKeyWidth;
        mSortedKeys = sortedKeys;
        mGridNeighbors = new List[mGridSize];
        if (minWidth == 0 || height == 0) {
            // No proximity required. Keyboard might be more keys keyboard.
            // In that case mNativeProximityInfo stays 0 and the grid stays empty.
            return;
        }
        computeNearestNeighbors();
        mNativeProximityInfo = createNativeProximityInfo(touchPositionCorrection);
    }
|
||||
|
||||
private long mNativeProximityInfo;
|
||||
static {
|
||||
JniUtils.loadNativeLibrary();
|
||||
}
|
||||
|
||||
// TODO: Stop passing proximityCharsArray
|
||||
private static native long setProximityInfoNative(int displayWidth, int displayHeight,
|
||||
int gridWidth, int gridHeight, int mostCommonKeyWidth, int mostCommonKeyHeight,
|
||||
int[] proximityCharsArray, int keyCount, int[] keyXCoordinates, int[] keyYCoordinates,
|
||||
int[] keyWidths, int[] keyHeights, int[] keyCharCodes, float[] sweetSpotCenterXs,
|
||||
float[] sweetSpotCenterYs, float[] sweetSpotRadii);
|
||||
|
||||
private static native void releaseProximityInfoNative(long nativeProximityInfo);
|
||||
|
||||
public static boolean needsProximityInfo(final Key key) {
|
||||
// Don't include special keys into ProximityInfo.
|
||||
return key.getCode() >= Constants.CODE_SPACE;
|
||||
}
|
||||
|
||||
private static int getProximityInfoKeysCount(final List<Key> keys) {
|
||||
int count = 0;
|
||||
for (final Key key : keys) {
|
||||
if (needsProximityInfo(key)) {
|
||||
count++;
|
||||
}
|
||||
}
|
||||
return count;
|
||||
}
|
||||
|
||||
private long createNativeProximityInfo(
|
||||
@Nonnull final TouchPositionCorrection touchPositionCorrection) {
|
||||
final List<Key>[] gridNeighborKeys = mGridNeighbors;
|
||||
final int[] proximityCharsArray = new int[mGridSize * MAX_PROXIMITY_CHARS_SIZE];
|
||||
Arrays.fill(proximityCharsArray, Constants.NOT_A_CODE);
|
||||
for (int i = 0; i < mGridSize; ++i) {
|
||||
final List<Key> neighborKeys = gridNeighborKeys[i];
|
||||
final int proximityCharsLength = neighborKeys.size();
|
||||
int infoIndex = i * MAX_PROXIMITY_CHARS_SIZE;
|
||||
for (int j = 0; j < proximityCharsLength; ++j) {
|
||||
final Key neighborKey = neighborKeys.get(j);
|
||||
// Excluding from proximityCharsArray
|
||||
if (!needsProximityInfo(neighborKey)) {
|
||||
continue;
|
||||
}
|
||||
proximityCharsArray[infoIndex] = neighborKey.getCode();
|
||||
infoIndex++;
|
||||
}
|
||||
}
|
||||
if (DEBUG) {
|
||||
final StringBuilder sb = new StringBuilder();
|
||||
for (int i = 0; i < mGridSize; i++) {
|
||||
sb.setLength(0);
|
||||
for (int j = 0; j < MAX_PROXIMITY_CHARS_SIZE; j++) {
|
||||
final int code = proximityCharsArray[i * MAX_PROXIMITY_CHARS_SIZE + j];
|
||||
if (code == Constants.NOT_A_CODE) {
|
||||
break;
|
||||
}
|
||||
if (sb.length() > 0) sb.append(" ");
|
||||
sb.append(Constants.printableCode(code));
|
||||
}
|
||||
Log.d(TAG, "proxmityChars["+i+"]: " + sb);
|
||||
}
|
||||
}
|
||||
|
||||
final List<Key> sortedKeys = mSortedKeys;
|
||||
final int keyCount = getProximityInfoKeysCount(sortedKeys);
|
||||
final int[] keyXCoordinates = new int[keyCount];
|
||||
final int[] keyYCoordinates = new int[keyCount];
|
||||
final int[] keyWidths = new int[keyCount];
|
||||
final int[] keyHeights = new int[keyCount];
|
||||
final int[] keyCharCodes = new int[keyCount];
|
||||
final float[] sweetSpotCenterXs;
|
||||
final float[] sweetSpotCenterYs;
|
||||
final float[] sweetSpotRadii;
|
||||
|
||||
for (int infoIndex = 0, keyIndex = 0; keyIndex < sortedKeys.size(); keyIndex++) {
|
||||
final Key key = sortedKeys.get(keyIndex);
|
||||
// Excluding from key coordinate arrays
|
||||
if (!needsProximityInfo(key)) {
|
||||
continue;
|
||||
}
|
||||
keyXCoordinates[infoIndex] = key.getX();
|
||||
keyYCoordinates[infoIndex] = key.getY();
|
||||
keyWidths[infoIndex] = key.getWidth();
|
||||
keyHeights[infoIndex] = key.getHeight();
|
||||
keyCharCodes[infoIndex] = key.getCode();
|
||||
infoIndex++;
|
||||
}
|
||||
|
||||
if (touchPositionCorrection.isValid()) {
|
||||
if (DEBUG) {
|
||||
Log.d(TAG, "touchPositionCorrection: ON");
|
||||
}
|
||||
sweetSpotCenterXs = new float[keyCount];
|
||||
sweetSpotCenterYs = new float[keyCount];
|
||||
sweetSpotRadii = new float[keyCount];
|
||||
final int rows = touchPositionCorrection.getRows();
|
||||
final float defaultRadius = DEFAULT_TOUCH_POSITION_CORRECTION_RADIUS
|
||||
* (float)Math.hypot(mMostCommonKeyWidth, mMostCommonKeyHeight);
|
||||
for (int infoIndex = 0, keyIndex = 0; keyIndex < sortedKeys.size(); keyIndex++) {
|
||||
final Key key = sortedKeys.get(keyIndex);
|
||||
// Excluding from touch position correction arrays
|
||||
if (!needsProximityInfo(key)) {
|
||||
continue;
|
||||
}
|
||||
final Rect hitBox = key.getHitBox();
|
||||
sweetSpotCenterXs[infoIndex] = hitBox.exactCenterX();
|
||||
sweetSpotCenterYs[infoIndex] = hitBox.exactCenterY();
|
||||
sweetSpotRadii[infoIndex] = defaultRadius;
|
||||
final int row = hitBox.top / mMostCommonKeyHeight;
|
||||
if (row < rows) {
|
||||
final int hitBoxWidth = hitBox.width();
|
||||
final int hitBoxHeight = hitBox.height();
|
||||
final float hitBoxDiagonal = (float)Math.hypot(hitBoxWidth, hitBoxHeight);
|
||||
sweetSpotCenterXs[infoIndex] +=
|
||||
touchPositionCorrection.getX(row) * hitBoxWidth;
|
||||
sweetSpotCenterYs[infoIndex] +=
|
||||
touchPositionCorrection.getY(row) * hitBoxHeight;
|
||||
sweetSpotRadii[infoIndex] =
|
||||
touchPositionCorrection.getRadius(row) * hitBoxDiagonal;
|
||||
}
|
||||
if (DEBUG) {
|
||||
Log.d(TAG, String.format(
|
||||
" [%2d] row=%d x/y/r=%7.2f/%7.2f/%5.2f %s code=%s", infoIndex, row,
|
||||
sweetSpotCenterXs[infoIndex], sweetSpotCenterYs[infoIndex],
|
||||
sweetSpotRadii[infoIndex], (row < rows ? "correct" : "default"),
|
||||
Constants.printableCode(key.getCode())));
|
||||
}
|
||||
infoIndex++;
|
||||
}
|
||||
} else {
|
||||
sweetSpotCenterXs = sweetSpotCenterYs = sweetSpotRadii = null;
|
||||
if (DEBUG) {
|
||||
Log.d(TAG, "touchPositionCorrection: OFF");
|
||||
}
|
||||
}
|
||||
|
||||
// TODO: Stop passing proximityCharsArray
|
||||
return setProximityInfoNative(mKeyboardMinWidth, mKeyboardHeight, mGridWidth, mGridHeight,
|
||||
mMostCommonKeyWidth, mMostCommonKeyHeight, proximityCharsArray, keyCount,
|
||||
keyXCoordinates, keyYCoordinates, keyWidths, keyHeights, keyCharCodes,
|
||||
sweetSpotCenterXs, sweetSpotCenterYs, sweetSpotRadii);
|
||||
}
|
||||
|
||||
    /** Returns the raw handle to the native proximity info, or 0 when none was created. */
    public long getNativeProximityInfo() {
        return mNativeProximityInfo;
    }
|
||||
|
||||
    /**
     * Releases the native proximity info when this object is garbage collected.
     * NOTE(review): finalize() is deprecated since Java 9; an explicit release
     * method or java.lang.ref.Cleaner would be preferable — confirm against the
     * project's minimum toolchain before changing.
     */
    @Override
    protected void finalize() throws Throwable {
        try {
            if (mNativeProximityInfo != 0) {
                releaseProximityInfoNative(mNativeProximityInfo);
                // Zero the handle so a double release cannot happen.
                mNativeProximityInfo = 0;
            }
        } finally {
            super.finalize();
        }
    }
|
||||
|
||||
    /**
     * Computes, for every grid cell, the list of keys whose edge lies within
     * SEARCH_DISTANCE most-common-key-widths of the cell center, and stores one
     * unmodifiable list per cell into mGridNeighbors.
     */
    private void computeNearestNeighbors() {
        final int defaultWidth = mMostCommonKeyWidth;
        final int keyCount = mSortedKeys.size();
        final int gridSize = mGridNeighbors.length;
        // A key is a neighbor of a cell when its edge is closer than this many
        // pixels to the cell center. Distances are compared squared to avoid sqrt.
        final int threshold = (int) (defaultWidth * SEARCH_DISTANCE);
        final int thresholdSquared = threshold * threshold;
        // Round-up so we don't have any pixels outside the grid
        final int lastPixelXCoordinate = mGridWidth * mCellWidth - 1;
        final int lastPixelYCoordinate = mGridHeight * mCellHeight - 1;

        // For large layouts, 'neighborsFlatBuffer' is about 80k of memory: gridSize is usually 512,
        // keycount is about 40 and a pointer to a Key is 4 bytes. This contains, for each cell,
        // enough space for as many keys as there are on the keyboard. Hence, every
        // keycount'th element is the start of a new cell, and each of these virtual subarrays
        // start empty with keycount spaces available. This fills up gradually in the loop below.
        // Since in the practice each cell does not have a lot of neighbors, most of this space is
        // actually just empty padding in this fixed-size buffer.
        final Key[] neighborsFlatBuffer = new Key[gridSize * keyCount];
        final int[] neighborCountPerCell = new int[gridSize];
        final int halfCellWidth = mCellWidth / 2;
        final int halfCellHeight = mCellHeight / 2;
        for (final Key key : mSortedKeys) {
            if (key.isSpacer()) continue;

            /* HOW WE PRE-SELECT THE CELLS (iterate over only the relevant cells, instead of
             * all of them). This method is performance-critical, so for each key we only visit
             * cells whose center can possibly lie within `threshold` of the key's edge.
             *
             * Taking the Y axis as the example: the candidate band runs from
             * topPixelWithinThreshold = key.mY - threshold down to
             * yEnd = key.mY + key.mHeight + threshold. We align topPixelWithinThreshold on the
             * grid — yDeltaToGrid is its offset inside its cell — and take the middle of that
             * top cell (yMiddleOfTopCell). A cell only qualifies when its center is at or below
             * topPixelWithinThreshold, which is the case when yDeltaToGrid <= halfCellHeight;
             * otherwise the iteration starts one cell lower. That starting center coordinate is
             * yStart, clamped to the first cell center (halfCellHeight) so we never start above
             * the grid. Because we iterate over cell centers and stop as soon as the center
             * exceeds yEnd, yEnd itself needs no grid alignment. The X axis is handled
             * symmetrically with xStart / xEnd.
             */
            final int keyX = key.getX();
            final int keyY = key.getY();
            final int topPixelWithinThreshold = keyY - threshold;
            final int yDeltaToGrid = topPixelWithinThreshold % mCellHeight;
            final int yMiddleOfTopCell = topPixelWithinThreshold - yDeltaToGrid + halfCellHeight;
            final int yStart = Math.max(halfCellHeight,
                    yMiddleOfTopCell + (yDeltaToGrid <= halfCellHeight ? 0 : mCellHeight));
            final int yEnd = Math.min(lastPixelYCoordinate, keyY + key.getHeight() + threshold);

            final int leftPixelWithinThreshold = keyX - threshold;
            final int xDeltaToGrid = leftPixelWithinThreshold % mCellWidth;
            final int xMiddleOfLeftCell = leftPixelWithinThreshold - xDeltaToGrid + halfCellWidth;
            final int xStart = Math.max(halfCellWidth,
                    xMiddleOfLeftCell + (xDeltaToGrid <= halfCellWidth ? 0 : mCellWidth));
            final int xEnd = Math.min(lastPixelXCoordinate, keyX + key.getWidth() + threshold);

            // Walk the candidate cells row by row; `index` is the flat cell index.
            int baseIndexOfCurrentRow = (yStart / mCellHeight) * mGridWidth + (xStart / mCellWidth);
            for (int centerY = yStart; centerY <= yEnd; centerY += mCellHeight) {
                int index = baseIndexOfCurrentRow;
                for (int centerX = xStart; centerX <= xEnd; centerX += mCellWidth) {
                    if (key.squaredDistanceToEdge(centerX, centerY) < thresholdSquared) {
                        // Append the key to this cell's virtual subarray in the flat buffer.
                        neighborsFlatBuffer[index * keyCount + neighborCountPerCell[index]] = key;
                        ++neighborCountPerCell[index];
                    }
                    ++index;
                }
                baseIndexOfCurrentRow += mGridWidth;
            }
        }

        // Copy each cell's populated slice of the flat buffer into an immutable list.
        for (int i = 0; i < gridSize; ++i) {
            final int indexStart = i * keyCount;
            final int indexEnd = indexStart + neighborCountPerCell[i];
            final ArrayList<Key> neighbors = new ArrayList<>(indexEnd - indexStart);
            for (int index = indexStart; index < indexEnd; index++) {
                neighbors.add(neighborsFlatBuffer[index]);
            }
            mGridNeighbors[i] = Collections.unmodifiableList(neighbors);
        }
    }
|
||||
|
||||
public void fillArrayWithNearestKeyCodes(final int x, final int y, final int primaryKeyCode,
|
||||
final int[] dest) {
|
||||
final int destLength = dest.length;
|
||||
if (destLength < 1) {
|
||||
return;
|
||||
}
|
||||
int index = 0;
|
||||
if (primaryKeyCode > Constants.CODE_SPACE) {
|
||||
dest[index++] = primaryKeyCode;
|
||||
}
|
||||
final List<Key> nearestKeys = getNearestKeys(x, y);
|
||||
for (Key key : nearestKeys) {
|
||||
if (index >= destLength) {
|
||||
break;
|
||||
}
|
||||
final int code = key.getCode();
|
||||
if (code <= Constants.CODE_SPACE) {
|
||||
break;
|
||||
}
|
||||
dest[index++] = code;
|
||||
}
|
||||
if (index < destLength) {
|
||||
dest[index] = Constants.NOT_A_CODE;
|
||||
}
|
||||
}
|
||||
|
||||
@Nonnull
|
||||
public List<Key> getNearestKeys(final int x, final int y) {
|
||||
if (x >= 0 && x < mKeyboardMinWidth && y >= 0 && y < mKeyboardHeight) {
|
||||
int index = (y / mCellHeight) * mGridWidth + (x / mCellWidth);
|
||||
if (index < mGridSize) {
|
||||
return mGridNeighbors[index];
|
||||
}
|
||||
}
|
||||
return EMPTY_KEY_LIST;
|
||||
}
|
||||
}
|
|
@ -0,0 +1,676 @@
|
|||
/*
|
||||
* Copyright (C) 2008 The Android Open Source Project
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package com.android.inputmethod.latin;
|
||||
|
||||
import android.text.TextUtils;
|
||||
import android.util.Log;
|
||||
import android.util.SparseArray;
|
||||
|
||||
import org.dslul.openboard.inputmethod.annotations.UsedForTesting;
|
||||
import org.dslul.openboard.inputmethod.latin.Dictionary;
|
||||
import org.dslul.openboard.inputmethod.latin.NgramContext;
|
||||
import org.dslul.openboard.inputmethod.latin.SuggestedWords.SuggestedWordInfo;
|
||||
import org.dslul.openboard.inputmethod.latin.common.ComposedData;
|
||||
import org.dslul.openboard.inputmethod.latin.common.Constants;
|
||||
import org.dslul.openboard.inputmethod.latin.common.FileUtils;
|
||||
import org.dslul.openboard.inputmethod.latin.common.InputPointers;
|
||||
import org.dslul.openboard.inputmethod.latin.common.StringUtils;
|
||||
import org.dslul.openboard.inputmethod.latin.makedict.DictionaryHeader;
|
||||
import org.dslul.openboard.inputmethod.latin.makedict.FormatSpec;
|
||||
import org.dslul.openboard.inputmethod.latin.makedict.FormatSpec.DictionaryOptions;
|
||||
import org.dslul.openboard.inputmethod.latin.makedict.UnsupportedFormatException;
|
||||
import org.dslul.openboard.inputmethod.latin.makedict.WordProperty;
|
||||
import org.dslul.openboard.inputmethod.latin.settings.SettingsValuesForSuggestion;
|
||||
import com.android.inputmethod.latin.utils.BinaryDictionaryUtils;
|
||||
import org.dslul.openboard.inputmethod.latin.utils.JniUtils;
|
||||
import com.android.inputmethod.latin.utils.WordInputEventForPersonalization;
|
||||
|
||||
import java.io.File;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.HashMap;
|
||||
import java.util.Locale;
|
||||
import java.util.Map;
|
||||
|
||||
import javax.annotation.Nonnull;
|
||||
|
||||
/**
|
||||
* Implements a static, compacted, binary dictionary of standard words.
|
||||
*/
|
||||
// TODO: All methods which should be locked need to have a suffix "Locked".
|
||||
public final class BinaryDictionary extends Dictionary {
|
||||
private static final String TAG = BinaryDictionary.class.getSimpleName();
|
||||
|
||||
// The cutoff returned by native for auto-commit confidence.
|
||||
// Must be equal to CONFIDENCE_TO_AUTO_COMMIT in native/jni/src/defines.h
|
||||
private static final int CONFIDENCE_TO_AUTO_COMMIT = 1000000;
|
||||
|
||||
public static final int DICTIONARY_MAX_WORD_LENGTH = 48;
|
||||
public static final int MAX_PREV_WORD_COUNT_FOR_N_GRAM = 3;
|
||||
|
||||
@UsedForTesting
|
||||
public static final String UNIGRAM_COUNT_QUERY = "UNIGRAM_COUNT";
|
||||
@UsedForTesting
|
||||
public static final String BIGRAM_COUNT_QUERY = "BIGRAM_COUNT";
|
||||
@UsedForTesting
|
||||
public static final String MAX_UNIGRAM_COUNT_QUERY = "MAX_UNIGRAM_COUNT";
|
||||
@UsedForTesting
|
||||
public static final String MAX_BIGRAM_COUNT_QUERY = "MAX_BIGRAM_COUNT";
|
||||
|
||||
public static final int NOT_A_VALID_TIMESTAMP = -1;
|
||||
|
||||
// Format to get unigram flags from native side via getWordPropertyNative().
|
||||
private static final int FORMAT_WORD_PROPERTY_OUTPUT_FLAG_COUNT = 5;
|
||||
private static final int FORMAT_WORD_PROPERTY_IS_NOT_A_WORD_INDEX = 0;
|
||||
private static final int FORMAT_WORD_PROPERTY_IS_POSSIBLY_OFFENSIVE_INDEX = 1;
|
||||
private static final int FORMAT_WORD_PROPERTY_HAS_NGRAMS_INDEX = 2;
|
||||
private static final int FORMAT_WORD_PROPERTY_HAS_SHORTCUTS_INDEX = 3;
|
||||
private static final int FORMAT_WORD_PROPERTY_IS_BEGINNING_OF_SENTENCE_INDEX = 4;
|
||||
|
||||
// Format to get probability and historical info from native side via getWordPropertyNative().
|
||||
public static final int FORMAT_WORD_PROPERTY_OUTPUT_PROBABILITY_INFO_COUNT = 4;
|
||||
public static final int FORMAT_WORD_PROPERTY_PROBABILITY_INDEX = 0;
|
||||
public static final int FORMAT_WORD_PROPERTY_TIMESTAMP_INDEX = 1;
|
||||
public static final int FORMAT_WORD_PROPERTY_LEVEL_INDEX = 2;
|
||||
public static final int FORMAT_WORD_PROPERTY_COUNT_INDEX = 3;
|
||||
|
||||
public static final String DICT_FILE_NAME_SUFFIX_FOR_MIGRATION = ".migrate";
|
||||
public static final String DIR_NAME_SUFFIX_FOR_RECORD_MIGRATION = ".migrating";
|
||||
|
||||
private long mNativeDict;
|
||||
private final long mDictSize;
|
||||
private final String mDictFilePath;
|
||||
private final boolean mUseFullEditDistance;
|
||||
private final boolean mIsUpdatable;
|
||||
private boolean mHasUpdated;
|
||||
|
||||
private final SparseArray<DicTraverseSession> mDicTraverseSessions = new SparseArray<>();
|
||||
|
||||
    // TODO: There should be a way to remove used DicTraverseSession objects from
    // {@code mDicTraverseSessions}.
    /**
     * Returns the traverse session for the given id, lazily creating and caching
     * it. Access to the cache is synchronized on mDicTraverseSessions, so this is
     * safe to call from multiple threads.
     */
    private DicTraverseSession getTraverseSession(final int traverseSessionId) {
        synchronized(mDicTraverseSessions) {
            DicTraverseSession traverseSession = mDicTraverseSessions.get(traverseSessionId);
            if (traverseSession == null) {
                traverseSession = new DicTraverseSession(mLocale, mNativeDict, mDictSize);
                mDicTraverseSessions.put(traverseSessionId, traverseSession);
            }
            return traverseSession;
        }
    }
|
||||
|
||||
    /**
     * Constructs binary dictionary using existing dictionary file.
     * @param filename the name of the file to read through native code.
     * @param offset the offset of the dictionary data within the file.
     * @param length the length of the binary data.
     * @param useFullEditDistance whether to use the full edit distance in suggestions
     * @param locale the locale of the dictionary.
     * @param dictType the dictionary type, as a human-readable string
     * @param isUpdatable whether to open the dictionary file in writable mode.
     */
    public BinaryDictionary(final String filename, final long offset, final long length,
            final boolean useFullEditDistance, final Locale locale, final String dictType,
            final boolean isUpdatable) {
        super(dictType, locale);
        mDictSize = length;
        mDictFilePath = filename;
        mIsUpdatable = isUpdatable;
        mHasUpdated = false;
        mUseFullEditDistance = useFullEditDistance;
        // Opens the file through JNI; the native handle ends up in mNativeDict.
        loadDictionary(filename, offset, length, isUpdatable);
    }
|
||||
|
||||
/**
|
||||
* Constructs binary dictionary on memory.
|
||||
* @param filename the name of the file used to flush.
|
||||
* @param useFullEditDistance whether to use the full edit distance in suggestions
|
||||
* @param dictType the dictionary type, as a human-readable string
|
||||
* @param formatVersion the format version of the dictionary
|
||||
* @param attributeMap the attributes of the dictionary
|
||||
*/
|
||||
public BinaryDictionary(final String filename, final boolean useFullEditDistance,
|
||||
final Locale locale, final String dictType, final long formatVersion,
|
||||
final Map<String, String> attributeMap) {
|
||||
super(dictType, locale);
|
||||
mDictSize = 0;
|
||||
mDictFilePath = filename;
|
||||
// On memory dictionary is always updatable.
|
||||
mIsUpdatable = true;
|
||||
mHasUpdated = false;
|
||||
mUseFullEditDistance = useFullEditDistance;
|
||||
final String[] keyArray = new String[attributeMap.size()];
|
||||
final String[] valueArray = new String[attributeMap.size()];
|
||||
int index = 0;
|
||||
for (final String key : attributeMap.keySet()) {
|
||||
keyArray[index] = key;
|
||||
valueArray[index] = attributeMap.get(key);
|
||||
index++;
|
||||
}
|
||||
mNativeDict = createOnMemoryNative(formatVersion, locale.toString(), keyArray, valueArray);
|
||||
}
|
||||
|
||||
|
||||
static {
|
||||
JniUtils.loadNativeLibrary();
|
||||
}
|
||||
|
||||
private static native long openNative(String sourceDir, long dictOffset, long dictSize,
|
||||
boolean isUpdatable);
|
||||
private static native long createOnMemoryNative(long formatVersion,
|
||||
String locale, String[] attributeKeyStringArray, String[] attributeValueStringArray);
|
||||
private static native void getHeaderInfoNative(long dict, int[] outHeaderSize,
|
||||
int[] outFormatVersion, ArrayList<int[]> outAttributeKeys,
|
||||
ArrayList<int[]> outAttributeValues);
|
||||
private static native boolean flushNative(long dict, String filePath);
|
||||
private static native boolean needsToRunGCNative(long dict, boolean mindsBlockByGC);
|
||||
private static native boolean flushWithGCNative(long dict, String filePath);
|
||||
private static native void closeNative(long dict);
|
||||
private static native int getFormatVersionNative(long dict);
|
||||
private static native int getProbabilityNative(long dict, int[] word);
|
||||
private static native int getMaxProbabilityOfExactMatchesNative(long dict, int[] word);
|
||||
private static native int getNgramProbabilityNative(long dict, int[][] prevWordCodePointArrays,
|
||||
boolean[] isBeginningOfSentenceArray, int[] word);
|
||||
private static native void getWordPropertyNative(long dict, int[] word,
|
||||
boolean isBeginningOfSentence, int[] outCodePoints, boolean[] outFlags,
|
||||
int[] outProbabilityInfo, ArrayList<int[][]> outNgramPrevWordsArray,
|
||||
ArrayList<boolean[]> outNgramPrevWordIsBeginningOfSentenceArray,
|
||||
ArrayList<int[]> outNgramTargets, ArrayList<int[]> outNgramProbabilityInfo,
|
||||
ArrayList<int[]> outShortcutTargets, ArrayList<Integer> outShortcutProbabilities);
|
||||
private static native int getNextWordNative(long dict, int token, int[] outCodePoints,
|
||||
boolean[] outIsBeginningOfSentence);
|
||||
private static native void getSuggestionsNative(long dict, long proximityInfo,
|
||||
long traverseSession, int[] xCoordinates, int[] yCoordinates, int[] times,
|
||||
int[] pointerIds, int[] inputCodePoints, int inputSize, int[] suggestOptions,
|
||||
int[][] prevWordCodePointArrays, boolean[] isBeginningOfSentenceArray,
|
||||
int prevWordCount, int[] outputSuggestionCount, int[] outputCodePoints,
|
||||
int[] outputScores, int[] outputIndices, int[] outputTypes,
|
||||
int[] outputAutoCommitFirstWordConfidence,
|
||||
float[] inOutWeightOfLangModelVsSpatialModel);
|
||||
private static native boolean addUnigramEntryNative(long dict, int[] word, int probability,
|
||||
int[] shortcutTarget, int shortcutProbability, boolean isBeginningOfSentence,
|
||||
boolean isNotAWord, boolean isPossiblyOffensive, int timestamp);
|
||||
private static native boolean removeUnigramEntryNative(long dict, int[] word);
|
||||
private static native boolean addNgramEntryNative(long dict,
|
||||
int[][] prevWordCodePointArrays, boolean[] isBeginningOfSentenceArray,
|
||||
int[] word, int probability, int timestamp);
|
||||
private static native boolean removeNgramEntryNative(long dict,
|
||||
int[][] prevWordCodePointArrays, boolean[] isBeginningOfSentenceArray, int[] word);
|
||||
private static native boolean updateEntriesForWordWithNgramContextNative(long dict,
|
||||
int[][] prevWordCodePointArrays, boolean[] isBeginningOfSentenceArray,
|
||||
int[] word, boolean isValidWord, int count, int timestamp);
|
||||
private static native int updateEntriesForInputEventsNative(long dict,
|
||||
WordInputEventForPersonalization[] inputEvents, int startIndex);
|
||||
private static native String getPropertyNative(long dict, String query);
|
||||
private static native boolean isCorruptedNative(long dict);
|
||||
private static native boolean migrateNative(long dict, String dictFilePath,
|
||||
long newFormatVersion);
|
||||
|
||||
    // TODO: Move native dict into session
    /**
     * Opens the dictionary file through JNI and stores the resulting native handle
     * in mNativeDict. Also resets the dirty flag.
     */
    private void loadDictionary(final String path, final long startOffset,
            final long length, final boolean isUpdatable) {
        mHasUpdated = false;
        mNativeDict = openNative(path, startOffset, length, isUpdatable);
    }
|
||||
|
||||
// TODO: Check isCorrupted() for main dictionaries.
|
||||
public boolean isCorrupted() {
|
||||
if (!isValidDictionary()) {
|
||||
return false;
|
||||
}
|
||||
if (!isCorruptedNative(mNativeDict)) {
|
||||
return false;
|
||||
}
|
||||
// TODO: Record the corruption.
|
||||
Log.e(TAG, "BinaryDictionary (" + mDictFilePath + ") is corrupted.");
|
||||
Log.e(TAG, "locale: " + mLocale);
|
||||
Log.e(TAG, "dict size: " + mDictSize);
|
||||
Log.e(TAG, "updatable: " + mIsUpdatable);
|
||||
return true;
|
||||
}
|
||||
|
||||
public DictionaryHeader getHeader() throws UnsupportedFormatException {
|
||||
if (mNativeDict == 0) {
|
||||
return null;
|
||||
}
|
||||
final int[] outHeaderSize = new int[1];
|
||||
final int[] outFormatVersion = new int[1];
|
||||
final ArrayList<int[]> outAttributeKeys = new ArrayList<>();
|
||||
final ArrayList<int[]> outAttributeValues = new ArrayList<>();
|
||||
getHeaderInfoNative(mNativeDict, outHeaderSize, outFormatVersion, outAttributeKeys,
|
||||
outAttributeValues);
|
||||
final HashMap<String, String> attributes = new HashMap<>();
|
||||
for (int i = 0; i < outAttributeKeys.size(); i++) {
|
||||
final String attributeKey = StringUtils.getStringFromNullTerminatedCodePointArray(
|
||||
outAttributeKeys.get(i));
|
||||
final String attributeValue = StringUtils.getStringFromNullTerminatedCodePointArray(
|
||||
outAttributeValues.get(i));
|
||||
attributes.put(attributeKey, attributeValue);
|
||||
}
|
||||
final boolean hasHistoricalInfo = DictionaryHeader.ATTRIBUTE_VALUE_TRUE.equals(
|
||||
attributes.get(DictionaryHeader.HAS_HISTORICAL_INFO_KEY));
|
||||
return new DictionaryHeader(outHeaderSize[0], new DictionaryOptions(attributes),
|
||||
new FormatSpec.FormatOptions(outFormatVersion[0], hasHistoricalInfo));
|
||||
}
|
||||
|
||||
/**
 * Queries the native decoder for suggestions matching the composed input.
 *
 * @param composedData the currently composed word, or gesture points in batch mode.
 * @param ngramContext previous-words context used for n-gram scoring.
 * @param proximityInfoHandle native handle describing key proximity on the keyboard.
 * @param settingsValuesForSuggestion decoder-relevant settings (e.g. offensive blocking).
 * @param sessionId identifies which cached native traverse session to use.
 * @param weightForLocale multiplier applied to raw native scores.
 * @param inOutWeightOfLangModelVsSpatialModel optional single-element in/out array;
 *        may be null, in which case a sentinel "no weight" value is passed down.
 * @return the decoded suggestions, or null if the dictionary is closed or the
 *         composed code points could not be copied.
 */
@Override
public ArrayList<SuggestedWordInfo> getSuggestions(final ComposedData composedData,
        final NgramContext ngramContext, final long proximityInfoHandle,
        final SettingsValuesForSuggestion settingsValuesForSuggestion,
        final int sessionId, final float weightForLocale,
        final float[] inOutWeightOfLangModelVsSpatialModel) {
    if (!isValidDictionary()) {
        return null;
    }
    final DicTraverseSession session = getTraverseSession(sessionId);
    // Reset the per-session input buffer before filling it below.
    Arrays.fill(session.mInputCodePoints, Constants.NOT_A_CODE);
    ngramContext.outputToArray(session.mPrevWordCodePointArrays,
            session.mIsBeginningOfSentenceArray);
    final InputPointers inputPointers = composedData.mInputPointers;
    final boolean isGesture = composedData.mIsBatchMode;
    final int inputSize;
    if (!isGesture) {
        // Typed input: copy the composed code points into the session buffer.
        inputSize =
                composedData.copyCodePointsExceptTrailingSingleQuotesAndReturnCodePointCount(
                        session.mInputCodePoints);
        if (inputSize < 0) {
            // Copy failed (e.g. input too long); no suggestions possible.
            return null;
        }
    } else {
        // Gesture input: the pointer trail itself is the input.
        inputSize = inputPointers.getPointerSize();
    }
    session.mNativeSuggestOptions.setUseFullEditDistance(mUseFullEditDistance);
    session.mNativeSuggestOptions.setIsGesture(isGesture);
    session.mNativeSuggestOptions.setBlockOffensiveWords(
            settingsValuesForSuggestion.mBlockPotentiallyOffensive);
    session.mNativeSuggestOptions.setWeightForLocale(weightForLocale);
    if (inOutWeightOfLangModelVsSpatialModel != null) {
        session.mInputOutputWeightOfLangModelVsSpatialModel[0] =
                inOutWeightOfLangModelVsSpatialModel[0];
    } else {
        session.mInputOutputWeightOfLangModelVsSpatialModel[0] =
                Dictionary.NOT_A_WEIGHT_OF_LANG_MODEL_VS_SPATIAL_MODEL;
    }
    // TODO: Pass multiple previous words information for n-gram.
    // NOTE(review): getTraverseSession(sessionId) is called a second time here even
    // though |session| was already fetched above; if it returns the same cached
    // object this is merely redundant -- confirm before simplifying.
    getSuggestionsNative(mNativeDict, proximityInfoHandle,
            getTraverseSession(sessionId).getSession(), inputPointers.getXCoordinates(),
            inputPointers.getYCoordinates(), inputPointers.getTimes(),
            inputPointers.getPointerIds(), session.mInputCodePoints, inputSize,
            session.mNativeSuggestOptions.getOptions(), session.mPrevWordCodePointArrays,
            session.mIsBeginningOfSentenceArray, ngramContext.getPrevWordCount(),
            session.mOutputSuggestionCount, session.mOutputCodePoints, session.mOutputScores,
            session.mSpaceIndices, session.mOutputTypes,
            session.mOutputAutoCommitFirstWordConfidence,
            session.mInputOutputWeightOfLangModelVsSpatialModel);
    if (inOutWeightOfLangModelVsSpatialModel != null) {
        // Propagate the weight the native decoder settled on back to the caller.
        inOutWeightOfLangModelVsSpatialModel[0] =
                session.mInputOutputWeightOfLangModelVsSpatialModel[0];
    }
    final int count = session.mOutputSuggestionCount[0];
    final ArrayList<SuggestedWordInfo> suggestions = new ArrayList<>();
    for (int j = 0; j < count; ++j) {
        // Results are packed into fixed-width slots of DICTIONARY_MAX_WORD_LENGTH
        // code points each, zero-terminated when shorter than the slot.
        final int start = j * DICTIONARY_MAX_WORD_LENGTH;
        int len = 0;
        while (len < DICTIONARY_MAX_WORD_LENGTH
                && session.mOutputCodePoints[start + len] != 0) {
            ++len;
        }
        if (len > 0) {
            suggestions.add(new SuggestedWordInfo(
                    new String(session.mOutputCodePoints, start, len),
                    "" /* prevWordsContext */,
                    (int)(session.mOutputScores[j] * weightForLocale),
                    session.mOutputTypes[j],
                    this /* sourceDict */,
                    session.mSpaceIndices[j] /* indexOfTouchPointOfSecondWord */,
                    session.mOutputAutoCommitFirstWordConfidence[0]));
        }
    }
    return suggestions;
}
|
||||
|
||||
/** Returns whether a native dictionary is currently open (handle is non-zero). */
public boolean isValidDictionary() {
    return mNativeDict != 0;
}
|
||||
|
||||
/**
 * Returns the binary format version reported by the native dictionary.
 * NOTE(review): unlike most accessors in this class, this does not guard against a
 * closed dictionary (mNativeDict == 0) -- confirm the native side tolerates 0.
 */
public int getFormatVersion() {
    return getFormatVersionNative(mNativeDict);
}
|
||||
|
||||
/** Returns whether {@code word} exists as a unigram, i.e. has a valid probability. */
@Override
public boolean isInDictionary(final String word) {
    return getFrequency(word) != NOT_A_PROBABILITY;
}
|
||||
|
||||
@Override
|
||||
public int getFrequency(final String word) {
|
||||
if (TextUtils.isEmpty(word)) {
|
||||
return NOT_A_PROBABILITY;
|
||||
}
|
||||
final int[] codePoints = StringUtils.toCodePointArray(word);
|
||||
return getProbabilityNative(mNativeDict, codePoints);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int getMaxFrequencyOfExactMatches(final String word) {
|
||||
if (TextUtils.isEmpty(word)) {
|
||||
return NOT_A_PROBABILITY;
|
||||
}
|
||||
final int[] codePoints = StringUtils.toCodePointArray(word);
|
||||
return getMaxProbabilityOfExactMatchesNative(mNativeDict, codePoints);
|
||||
}
|
||||
|
||||
/** Returns whether the (context, word) n-gram exists, i.e. has a valid probability. */
@UsedForTesting
public boolean isValidNgram(final NgramContext ngramContext, final String word) {
    return getNgramProbability(ngramContext, word) != NOT_A_PROBABILITY;
}
|
||||
|
||||
public int getNgramProbability(final NgramContext ngramContext, final String word) {
|
||||
if (!ngramContext.isValid() || TextUtils.isEmpty(word)) {
|
||||
return NOT_A_PROBABILITY;
|
||||
}
|
||||
final int[][] prevWordCodePointArrays = new int[ngramContext.getPrevWordCount()][];
|
||||
final boolean[] isBeginningOfSentenceArray = new boolean[ngramContext.getPrevWordCount()];
|
||||
ngramContext.outputToArray(prevWordCodePointArrays, isBeginningOfSentenceArray);
|
||||
final int[] wordCodePoints = StringUtils.toCodePointArray(word);
|
||||
return getNgramProbabilityNative(mNativeDict, prevWordCodePointArrays,
|
||||
isBeginningOfSentenceArray, wordCodePoints);
|
||||
}
|
||||
|
||||
/**
 * Fetches all stored properties of {@code word} (flags, probability info, n-grams,
 * and shortcuts) from the native dictionary.
 *
 * @param word the word to look up; a null word yields null.
 * @param isBeginningOfSentence whether to look the word up as a
 *        beginning-of-sentence entry.
 * @return the assembled WordProperty.
 */
public WordProperty getWordProperty(final String word, final boolean isBeginningOfSentence) {
    if (word == null) {
        return null;
    }
    final int[] codePoints = StringUtils.toCodePointArray(word);
    // Out-parameters filled by the native call below; sizes and flag indices must
    // match the FORMAT_WORD_PROPERTY_* constants.
    final int[] outCodePoints = new int[DICTIONARY_MAX_WORD_LENGTH];
    final boolean[] outFlags = new boolean[FORMAT_WORD_PROPERTY_OUTPUT_FLAG_COUNT];
    final int[] outProbabilityInfo =
            new int[FORMAT_WORD_PROPERTY_OUTPUT_PROBABILITY_INFO_COUNT];
    final ArrayList<int[][]> outNgramPrevWordsArray = new ArrayList<>();
    final ArrayList<boolean[]> outNgramPrevWordIsBeginningOfSentenceArray =
            new ArrayList<>();
    final ArrayList<int[]> outNgramTargets = new ArrayList<>();
    final ArrayList<int[]> outNgramProbabilityInfo = new ArrayList<>();
    final ArrayList<int[]> outShortcutTargets = new ArrayList<>();
    final ArrayList<Integer> outShortcutProbabilities = new ArrayList<>();
    getWordPropertyNative(mNativeDict, codePoints, isBeginningOfSentence, outCodePoints,
            outFlags, outProbabilityInfo, outNgramPrevWordsArray,
            outNgramPrevWordIsBeginningOfSentenceArray, outNgramTargets,
            outNgramProbabilityInfo, outShortcutTargets, outShortcutProbabilities);
    return new WordProperty(codePoints,
            outFlags[FORMAT_WORD_PROPERTY_IS_NOT_A_WORD_INDEX],
            outFlags[FORMAT_WORD_PROPERTY_IS_POSSIBLY_OFFENSIVE_INDEX],
            outFlags[FORMAT_WORD_PROPERTY_HAS_NGRAMS_INDEX],
            outFlags[FORMAT_WORD_PROPERTY_HAS_SHORTCUTS_INDEX],
            outFlags[FORMAT_WORD_PROPERTY_IS_BEGINNING_OF_SENTENCE_INDEX], outProbabilityInfo,
            outNgramPrevWordsArray, outNgramPrevWordIsBeginningOfSentenceArray,
            outNgramTargets, outNgramProbabilityInfo, outShortcutTargets,
            outShortcutProbabilities);
}
|
||||
|
||||
/**
 * Result holder for {@link #getNextWordProperty}: the properties of the word just
 * visited plus the token to pass to the next iteration step.
 */
public static class GetNextWordPropertyResult {
    public WordProperty mWordProperty;
    // Token to pass to the next getNextWordProperty call to continue iterating.
    public int mNextToken;

    public GetNextWordPropertyResult(final WordProperty wordProperty, final int nextToken) {
        mWordProperty = wordProperty;
        mNextToken = nextToken;
    }
}
|
||||
|
||||
/**
|
||||
* Method to iterate all words in the dictionary for makedict.
|
||||
* If token is 0, this method newly starts iterating the dictionary.
|
||||
*/
|
||||
public GetNextWordPropertyResult getNextWordProperty(final int token) {
|
||||
final int[] codePoints = new int[DICTIONARY_MAX_WORD_LENGTH];
|
||||
final boolean[] isBeginningOfSentence = new boolean[1];
|
||||
final int nextToken = getNextWordNative(mNativeDict, token, codePoints,
|
||||
isBeginningOfSentence);
|
||||
final String word = StringUtils.getStringFromNullTerminatedCodePointArray(codePoints);
|
||||
return new GetNextWordPropertyResult(
|
||||
getWordProperty(word, isBeginningOfSentence[0]), nextToken);
|
||||
}
|
||||
|
||||
// Add a unigram entry to binary dictionary with unigram attributes in native code.
|
||||
public boolean addUnigramEntry(final String word, final int probability,
|
||||
final String shortcutTarget, final int shortcutProbability,
|
||||
final boolean isBeginningOfSentence, final boolean isNotAWord,
|
||||
final boolean isPossiblyOffensive, final int timestamp) {
|
||||
if (word == null || (word.isEmpty() && !isBeginningOfSentence)) {
|
||||
return false;
|
||||
}
|
||||
final int[] codePoints = StringUtils.toCodePointArray(word);
|
||||
final int[] shortcutTargetCodePoints = (shortcutTarget != null) ?
|
||||
StringUtils.toCodePointArray(shortcutTarget) : null;
|
||||
if (!addUnigramEntryNative(mNativeDict, codePoints, probability, shortcutTargetCodePoints,
|
||||
shortcutProbability, isBeginningOfSentence, isNotAWord, isPossiblyOffensive,
|
||||
timestamp)) {
|
||||
return false;
|
||||
}
|
||||
mHasUpdated = true;
|
||||
return true;
|
||||
}
|
||||
|
||||
// Remove a unigram entry from the binary dictionary in native code.
|
||||
public boolean removeUnigramEntry(final String word) {
|
||||
if (TextUtils.isEmpty(word)) {
|
||||
return false;
|
||||
}
|
||||
final int[] codePoints = StringUtils.toCodePointArray(word);
|
||||
if (!removeUnigramEntryNative(mNativeDict, codePoints)) {
|
||||
return false;
|
||||
}
|
||||
mHasUpdated = true;
|
||||
return true;
|
||||
}
|
||||
|
||||
// Add an n-gram entry to the binary dictionary with timestamp in native code.
|
||||
public boolean addNgramEntry(final NgramContext ngramContext, final String word,
|
||||
final int probability, final int timestamp) {
|
||||
if (!ngramContext.isValid() || TextUtils.isEmpty(word)) {
|
||||
return false;
|
||||
}
|
||||
final int[][] prevWordCodePointArrays = new int[ngramContext.getPrevWordCount()][];
|
||||
final boolean[] isBeginningOfSentenceArray = new boolean[ngramContext.getPrevWordCount()];
|
||||
ngramContext.outputToArray(prevWordCodePointArrays, isBeginningOfSentenceArray);
|
||||
final int[] wordCodePoints = StringUtils.toCodePointArray(word);
|
||||
if (!addNgramEntryNative(mNativeDict, prevWordCodePointArrays,
|
||||
isBeginningOfSentenceArray, wordCodePoints, probability, timestamp)) {
|
||||
return false;
|
||||
}
|
||||
mHasUpdated = true;
|
||||
return true;
|
||||
}
|
||||
|
||||
// Update entries for the word occurrence with the ngramContext.
|
||||
public boolean updateEntriesForWordWithNgramContext(@Nonnull final NgramContext ngramContext,
|
||||
final String word, final boolean isValidWord, final int count, final int timestamp) {
|
||||
if (TextUtils.isEmpty(word)) {
|
||||
return false;
|
||||
}
|
||||
final int[][] prevWordCodePointArrays = new int[ngramContext.getPrevWordCount()][];
|
||||
final boolean[] isBeginningOfSentenceArray = new boolean[ngramContext.getPrevWordCount()];
|
||||
ngramContext.outputToArray(prevWordCodePointArrays, isBeginningOfSentenceArray);
|
||||
final int[] wordCodePoints = StringUtils.toCodePointArray(word);
|
||||
if (!updateEntriesForWordWithNgramContextNative(mNativeDict, prevWordCodePointArrays,
|
||||
isBeginningOfSentenceArray, wordCodePoints, isValidWord, count, timestamp)) {
|
||||
return false;
|
||||
}
|
||||
mHasUpdated = true;
|
||||
return true;
|
||||
}
|
||||
|
||||
/**
 * Feeds recorded word input events into the dictionary for personalization.
 * Events are processed in native batches; GC is run (with flush) between batches
 * when the native side reports it is needed.
 */
@UsedForTesting
public void updateEntriesForInputEvents(final WordInputEventForPersonalization[] inputEvents) {
    if (!isValidDictionary()) {
        return;
    }
    int processedEventCount = 0;
    while (processedEventCount < inputEvents.length) {
        if (needsToRunGC(true /* mindsBlockByGC */)) {
            flushWithGC();
        }
        processedEventCount = updateEntriesForInputEventsNative(mNativeDict, inputEvents,
                processedEventCount);
        mHasUpdated = true;
        // A non-positive index means the native side made no progress; bail out
        // rather than loop forever.
        if (processedEventCount <= 0) {
            return;
        }
    }
}
|
||||
|
||||
/**
 * Closes and re-opens the native dictionary from its backing file, e.g. after a
 * flush rewrote the file on disk.
 */
private void reopen() {
    close();
    final File dictFile = new File(mDictFilePath);
    // WARNING: Because we pass 0 as the offset and file.length() as the length, this can
    // only be called for actual files. Right now it's only called by the flush() family of
    // functions, which require an updatable dictionary, so it's okay. But beware.
    loadDictionary(dictFile.getAbsolutePath(), 0 /* startOffset */,
            dictFile.length(), mIsUpdatable);
}
|
||||
|
||||
// Flush to dict file if the dictionary has been updated.
|
||||
public boolean flush() {
|
||||
if (!isValidDictionary()) {
|
||||
return false;
|
||||
}
|
||||
if (mHasUpdated) {
|
||||
if (!flushNative(mNativeDict, mDictFilePath)) {
|
||||
return false;
|
||||
}
|
||||
reopen();
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
// Run GC and flush to dict file if the dictionary has been updated.
|
||||
public boolean flushWithGCIfHasUpdated() {
|
||||
if (mHasUpdated) {
|
||||
return flushWithGC();
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
// Run GC and flush to dict file.
|
||||
public boolean flushWithGC() {
|
||||
if (!isValidDictionary()) {
|
||||
return false;
|
||||
}
|
||||
if (!flushWithGCNative(mNativeDict, mDictFilePath)) {
|
||||
return false;
|
||||
}
|
||||
reopen();
|
||||
return true;
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks whether GC is needed to run or not.
|
||||
* @param mindsBlockByGC Whether to mind operations blocked by GC. We don't need to care about
|
||||
* the blocking in some situations such as in idle time or just before closing.
|
||||
* @return whether GC is needed to run or not.
|
||||
*/
|
||||
public boolean needsToRunGC(final boolean mindsBlockByGC) {
|
||||
if (!isValidDictionary()) {
|
||||
return false;
|
||||
}
|
||||
return needsToRunGCNative(mNativeDict, mindsBlockByGC);
|
||||
}
|
||||
|
||||
/**
 * Migrates this dictionary to {@code newFormatVersion}: the native side writes a
 * converted copy next to the current file, which then replaces the original, and the
 * dictionary is reopened. A marker directory exists for the duration of the migration
 * so a crash mid-migration can be detected (and the dictionary abandoned) next time.
 *
 * @return true if migration succeeded and the dictionary was reopened.
 */
public boolean migrateTo(final int newFormatVersion) {
    if (!isValidDictionary()) {
        return false;
    }
    final File isMigratingDir =
            new File(mDictFilePath + DIR_NAME_SUFFIX_FOR_RECORD_MIGRATION);
    if (isMigratingDir.exists()) {
        // Marker left behind: a previous migration crashed mid-way.
        isMigratingDir.delete();
        Log.e(TAG, "Previous migration attempt failed probably due to a crash. "
                + "Giving up using the old dictionary (" + mDictFilePath + ").");
        return false;
    }
    if (!isMigratingDir.mkdir()) {
        Log.e(TAG, "Cannot create a dir (" + isMigratingDir.getAbsolutePath()
                + ") to record migration.");
        return false;
    }
    try {
        final String tmpDictFilePath = mDictFilePath + DICT_FILE_NAME_SUFFIX_FOR_MIGRATION;
        if (!migrateNative(mNativeDict, tmpDictFilePath, newFormatVersion)) {
            return false;
        }
        // Close the old native handle before replacing the file underneath it.
        close();
        final File dictFile = new File(mDictFilePath);
        final File tmpDictFile = new File(tmpDictFilePath);
        if (!FileUtils.deleteRecursively(dictFile)) {
            return false;
        }
        if (!BinaryDictionaryUtils.renameDict(tmpDictFile, dictFile)) {
            return false;
        }
        loadDictionary(dictFile.getAbsolutePath(), 0 /* startOffset */,
                dictFile.length(), mIsUpdatable);
        return true;
    } finally {
        // Always remove the in-progress marker, on success or failure.
        isMigratingDir.delete();
    }
}
|
||||
|
||||
@UsedForTesting
|
||||
public String getPropertyForGettingStats(final String query) {
|
||||
if (!isValidDictionary()) {
|
||||
return "";
|
||||
}
|
||||
return getPropertyNative(mNativeDict, query);
|
||||
}
|
||||
|
||||
/**
 * A candidate qualifies for auto-commit when its first-word confidence exceeds
 * the CONFIDENCE_TO_AUTO_COMMIT threshold.
 */
@Override
public boolean shouldAutoCommit(final SuggestedWordInfo candidate) {
    return candidate.mAutoCommitFirstWordConfidence > CONFIDENCE_TO_AUTO_COMMIT;
}
|
||||
|
||||
/**
 * Closes every cached native traverse session (under the sessions lock), then
 * releases the native dictionary handle.
 */
@Override
public void close() {
    synchronized (mDicTraverseSessions) {
        final int sessionsSize = mDicTraverseSessions.size();
        for (int index = 0; index < sessionsSize; ++index) {
            final DicTraverseSession traverseSession = mDicTraverseSessions.valueAt(index);
            if (traverseSession != null) {
                traverseSession.close();
            }
        }
        mDicTraverseSessions.clear();
    }
    closeInternalLocked();
}
|
||||
|
||||
/** Releases the native dictionary handle; idempotent (no-op once already closed). */
private synchronized void closeInternalLocked() {
    if (mNativeDict != 0) {
        closeNative(mNativeDict);
        mNativeDict = 0;  // Prevent double-free and mark the dictionary as closed.
    }
}
|
||||
|
||||
// TODO: Manage BinaryDictionary instances without using WeakReference or something.
|
||||
/** Safety net: releases the native handle in case close() was never called. */
@Override
protected void finalize() throws Throwable {
    try {
        closeInternalLocked();
    } finally {
        super.finalize();
    }
}
|
||||
}
|
|
@ -0,0 +1,98 @@
|
|||
/*
|
||||
* Copyright (C) 2012, The Android Open Source Project
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package com.android.inputmethod.latin;
|
||||
|
||||
import org.dslul.openboard.inputmethod.latin.common.NativeSuggestOptions;
|
||||
import org.dslul.openboard.inputmethod.latin.define.DecoderSpecificConstants;
|
||||
import org.dslul.openboard.inputmethod.latin.utils.JniUtils;
|
||||
|
||||
import java.util.Locale;
|
||||
|
||||
/**
 * Per-dictionary native traverse session: owns the native session handle plus the
 * Java-side scratch arrays that the suggestion code passes across JNI. Array sizes
 * must stay in sync with the native decoder's constants (see comments below).
 */
public final class DicTraverseSession {
    static {
        // The native session functions declared below live in the JNI library.
        JniUtils.loadNativeLibrary();
    }
    // Must be equal to MAX_RESULTS in native/jni/src/defines.h
    private static final int MAX_RESULTS = 18;
    // Scratch buffer for the composed word's code points.
    public final int[] mInputCodePoints =
            new int[DecoderSpecificConstants.DICTIONARY_MAX_WORD_LENGTH];
    // Previous-words context handed to the native decoder (parallel arrays).
    public final int[][] mPrevWordCodePointArrays =
            new int[DecoderSpecificConstants.MAX_PREV_WORD_COUNT_FOR_N_GRAM][];
    public final boolean[] mIsBeginningOfSentenceArray =
            new boolean[DecoderSpecificConstants.MAX_PREV_WORD_COUNT_FOR_N_GRAM];
    // Native out-parameters: suggestion count, packed per-result code points,
    // and per-result metadata.
    public final int[] mOutputSuggestionCount = new int[1];
    public final int[] mOutputCodePoints =
            new int[DecoderSpecificConstants.DICTIONARY_MAX_WORD_LENGTH * MAX_RESULTS];
    public final int[] mSpaceIndices = new int[MAX_RESULTS];
    public final int[] mOutputScores = new int[MAX_RESULTS];
    public final int[] mOutputTypes = new int[MAX_RESULTS];
    // Only one result is ever used
    public final int[] mOutputAutoCommitFirstWordConfidence = new int[1];
    // In/out language-model-vs-spatial-model weight shared with the native decoder.
    public final float[] mInputOutputWeightOfLangModelVsSpatialModel = new float[1];

    public final NativeSuggestOptions mNativeSuggestOptions = new NativeSuggestOptions();

    private static native long setDicTraverseSessionNative(String locale, long dictSize);
    private static native void initDicTraverseSessionNative(long nativeDicTraverseSession,
            long dictionary, int[] previousWord, int previousWordLength);
    private static native void releaseDicTraverseSessionNative(long nativeDicTraverseSession);

    // Native session handle; reset to 0 once released.
    private long mNativeDicTraverseSession;

    /** Creates the native session and binds it to the given dictionary handle. */
    public DicTraverseSession(Locale locale, long dictionary, long dictSize) {
        mNativeDicTraverseSession = createNativeDicTraverseSession(
                locale != null ? locale.toString() : "", dictSize);
        initSession(dictionary);
    }

    /** Returns the native session handle to pass across JNI. */
    public long getSession() {
        return mNativeDicTraverseSession;
    }

    /** (Re)binds this session to a dictionary with no previous-word context. */
    public void initSession(long dictionary) {
        initSession(dictionary, null, 0);
    }

    /** (Re)binds this session to a dictionary with the given previous-word context. */
    public void initSession(long dictionary, int[] previousWord, int previousWordLength) {
        initDicTraverseSessionNative(
                mNativeDicTraverseSession, dictionary, previousWord, previousWordLength);
    }

    private static long createNativeDicTraverseSession(String locale, long dictSize) {
        return setDicTraverseSessionNative(locale, dictSize);
    }

    /** Releases the native session; safe to call more than once. */
    private void closeInternal() {
        if (mNativeDicTraverseSession != 0) {
            releaseDicTraverseSessionNative(mNativeDicTraverseSession);
            mNativeDicTraverseSession = 0;
        }
    }

    public void close() {
        closeInternal();
    }

    /** Safety net in case close() was never called. */
    @Override
    protected void finalize() throws Throwable {
        try {
            closeInternal();
        } finally {
            super.finalize();
        }
    }
}
|
|
@ -0,0 +1,129 @@
|
|||
/*
|
||||
* Copyright (C) 2014 The Android Open Source Project
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package com.android.inputmethod.latin.utils;
|
||||
|
||||
import org.dslul.openboard.inputmethod.annotations.UsedForTesting;
|
||||
import com.android.inputmethod.latin.BinaryDictionary;
|
||||
import org.dslul.openboard.inputmethod.latin.common.StringUtils;
|
||||
import org.dslul.openboard.inputmethod.latin.makedict.DictionaryHeader;
|
||||
import org.dslul.openboard.inputmethod.latin.makedict.UnsupportedFormatException;
|
||||
import org.dslul.openboard.inputmethod.latin.utils.JniUtils;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
import java.util.Locale;
|
||||
import java.util.Map;
|
||||
import java.util.regex.Matcher;
|
||||
import java.util.regex.Pattern;
|
||||
|
||||
public final class BinaryDictionaryUtils {
|
||||
private static final String TAG = BinaryDictionaryUtils.class.getSimpleName();
|
||||
|
||||
private BinaryDictionaryUtils() {
|
||||
// This utility class is not publicly instantiable.
|
||||
}
|
||||
|
||||
static {
|
||||
JniUtils.loadNativeLibrary();
|
||||
}
|
||||
|
||||
@UsedForTesting
|
||||
private static native boolean createEmptyDictFileNative(String filePath, long dictVersion,
|
||||
String locale, String[] attributeKeyStringArray, String[] attributeValueStringArray);
|
||||
private static native float calcNormalizedScoreNative(int[] before, int[] after, int score);
|
||||
private static native int setCurrentTimeForTestNative(int currentTime);
|
||||
|
||||
public static DictionaryHeader getHeader(final File dictFile)
|
||||
throws IOException, UnsupportedFormatException {
|
||||
return getHeaderWithOffsetAndLength(dictFile, 0 /* offset */, dictFile.length());
|
||||
}
|
||||
|
||||
public static DictionaryHeader getHeaderWithOffsetAndLength(final File dictFile,
|
||||
final long offset, final long length) throws IOException, UnsupportedFormatException {
|
||||
// dictType is never used for reading the header. Passing an empty string.
|
||||
final BinaryDictionary binaryDictionary = new BinaryDictionary(
|
||||
dictFile.getAbsolutePath(), offset, length,
|
||||
true /* useFullEditDistance */, null /* locale */, "" /* dictType */,
|
||||
false /* isUpdatable */);
|
||||
final DictionaryHeader header = binaryDictionary.getHeader();
|
||||
binaryDictionary.close();
|
||||
if (header == null) {
|
||||
throw new IOException();
|
||||
}
|
||||
return header;
|
||||
}
|
||||
|
||||
public static boolean renameDict(final File dictFile, final File newDictFile) {
|
||||
if (dictFile.isFile()) {
|
||||
return dictFile.renameTo(newDictFile);
|
||||
} else if (dictFile.isDirectory()) {
|
||||
final String dictName = dictFile.getName();
|
||||
final String newDictName = newDictFile.getName();
|
||||
if (newDictFile.exists()) {
|
||||
return false;
|
||||
}
|
||||
for (final File file : dictFile.listFiles()) {
|
||||
if (!file.isFile()) {
|
||||
continue;
|
||||
}
|
||||
final String fileName = file.getName();
|
||||
final String newFileName = fileName.replaceFirst(
|
||||
Pattern.quote(dictName), Matcher.quoteReplacement(newDictName));
|
||||
if (!file.renameTo(new File(dictFile, newFileName))) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
return dictFile.renameTo(newDictFile);
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
@UsedForTesting
|
||||
public static boolean createEmptyDictFile(final String filePath, final long dictVersion,
|
||||
final Locale locale, final Map<String, String> attributeMap) {
|
||||
final String[] keyArray = new String[attributeMap.size()];
|
||||
final String[] valueArray = new String[attributeMap.size()];
|
||||
int index = 0;
|
||||
for (final String key : attributeMap.keySet()) {
|
||||
keyArray[index] = key;
|
||||
valueArray[index] = attributeMap.get(key);
|
||||
index++;
|
||||
}
|
||||
return createEmptyDictFileNative(filePath, dictVersion, locale.toString(), keyArray,
|
||||
valueArray);
|
||||
}
|
||||
|
||||
public static float calcNormalizedScore(final String before, final String after,
|
||||
final int score) {
|
||||
return calcNormalizedScoreNative(StringUtils.toCodePointArray(before),
|
||||
StringUtils.toCodePointArray(after), score);
|
||||
}
|
||||
|
||||
/**
|
||||
* Control the current time to be used in the native code. If currentTime >= 0, this method sets
|
||||
* the current time and gets into test mode.
|
||||
* In test mode, set timestamp is used as the current time in the native code.
|
||||
* If currentTime < 0, quit the test mode and returns to using time() to get the current time.
|
||||
*
|
||||
* @param currentTime seconds since the unix epoch
|
||||
* @return current time got in the native code.
|
||||
*/
|
||||
@UsedForTesting
|
||||
public static int setCurrentTimeForTest(final int currentTime) {
|
||||
return setCurrentTimeForTestNative(currentTime);
|
||||
}
|
||||
}
|
|
@ -0,0 +1,107 @@
|
|||
/*
|
||||
* Copyright (C) 2014 The Android Open Source Project
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package com.android.inputmethod.latin.utils;
|
||||
|
||||
import android.util.Log;
|
||||
|
||||
import org.dslul.openboard.inputmethod.annotations.UsedForTesting;
|
||||
import org.dslul.openboard.inputmethod.latin.NgramContext;
|
||||
import org.dslul.openboard.inputmethod.latin.common.StringUtils;
|
||||
import org.dslul.openboard.inputmethod.latin.define.DecoderSpecificConstants;
|
||||
import org.dslul.openboard.inputmethod.latin.settings.SpacingAndPunctuations;
|
||||
import org.dslul.openboard.inputmethod.latin.utils.DictionaryInfoUtils;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.Locale;
|
||||
|
||||
// Note: this class is used as a parameter type of a native method. You should be careful when you
|
||||
// rename this class or field name. See BinaryDictionary#addMultipleDictionaryEntriesNative().
|
||||
public final class WordInputEventForPersonalization {
|
||||
private static final String TAG = WordInputEventForPersonalization.class.getSimpleName();
|
||||
private static final boolean DEBUG_TOKEN = false;
|
||||
|
||||
public final int[] mTargetWord;
|
||||
public final int mPrevWordsCount;
|
||||
public final int[][] mPrevWordArray =
|
||||
new int[DecoderSpecificConstants.MAX_PREV_WORD_COUNT_FOR_N_GRAM][];
|
||||
public final boolean[] mIsPrevWordBeginningOfSentenceArray =
|
||||
new boolean[DecoderSpecificConstants.MAX_PREV_WORD_COUNT_FOR_N_GRAM];
|
||||
// Time stamp in seconds.
|
||||
public final int mTimestamp;
|
||||
|
||||
@UsedForTesting
|
||||
public WordInputEventForPersonalization(final CharSequence targetWord,
|
||||
final NgramContext ngramContext, final int timestamp) {
|
||||
mTargetWord = StringUtils.toCodePointArray(targetWord);
|
||||
mPrevWordsCount = ngramContext.getPrevWordCount();
|
||||
ngramContext.outputToArray(mPrevWordArray, mIsPrevWordBeginningOfSentenceArray);
|
||||
mTimestamp = timestamp;
|
||||
}
|
||||
|
||||
// Process a list of words and return a list of {@link WordInputEventForPersonalization}
|
||||
// objects.
|
||||
public static ArrayList<WordInputEventForPersonalization> createInputEventFrom(
|
||||
final List<String> tokens, final int timestamp,
|
||||
final SpacingAndPunctuations spacingAndPunctuations, final Locale locale) {
|
||||
final ArrayList<WordInputEventForPersonalization> inputEvents = new ArrayList<>();
|
||||
final int N = tokens.size();
|
||||
NgramContext ngramContext = NgramContext.EMPTY_PREV_WORDS_INFO;
|
||||
for (int i = 0; i < N; ++i) {
|
||||
final String tempWord = tokens.get(i);
|
||||
if (StringUtils.isEmptyStringOrWhiteSpaces(tempWord)) {
|
||||
// just skip this token
|
||||
if (DEBUG_TOKEN) {
|
||||
Log.d(TAG, "--- isEmptyStringOrWhiteSpaces: \"" + tempWord + "\"");
|
||||
}
|
||||
continue;
|
||||
}
|
||||
if (!DictionaryInfoUtils.looksValidForDictionaryInsertion(
|
||||
tempWord, spacingAndPunctuations)) {
|
||||
if (DEBUG_TOKEN) {
|
||||
Log.d(TAG, "--- not looksValidForDictionaryInsertion: \""
|
||||
+ tempWord + "\"");
|
||||
}
|
||||
// Sentence terminator found. Split.
|
||||
// TODO: Detect whether the context is beginning-of-sentence.
|
||||
ngramContext = NgramContext.EMPTY_PREV_WORDS_INFO;
|
||||
continue;
|
||||
}
|
||||
if (DEBUG_TOKEN) {
|
||||
Log.d(TAG, "--- word: \"" + tempWord + "\"");
|
||||
}
|
||||
final WordInputEventForPersonalization inputEvent =
|
||||
detectWhetherVaildWordOrNotAndGetInputEvent(
|
||||
ngramContext, tempWord, timestamp, locale);
|
||||
if (inputEvent == null) {
|
||||
continue;
|
||||
}
|
||||
inputEvents.add(inputEvent);
|
||||
ngramContext = ngramContext.getNextNgramContext(new NgramContext.WordInfo(tempWord));
|
||||
}
|
||||
return inputEvents;
|
||||
}
|
||||
|
||||
private static WordInputEventForPersonalization detectWhetherVaildWordOrNotAndGetInputEvent(
|
||||
final NgramContext ngramContext, final String targetWord, final int timestamp,
|
||||
final Locale locale) {
|
||||
if (locale == null) {
|
||||
return null;
|
||||
}
|
||||
return new WordInputEventForPersonalization(targetWord, ngramContext, timestamp);
|
||||
}
|
||||
}
|
Loading…
Add table
Add a link
Reference in a new issue